pbr-5.11.1/.coveragerc

[run]
branch = True
source = pbr
omit = pbr/tests/*

[report]
ignore_errors = True
precision = 2

pbr-5.11.1/.mailmap

# Format is:
#
#
Davanum Srinivas
Erik M. Bray Erik Bray
Zhongyue Luo

pbr-5.11.1/.pre-commit-config.yaml

# We from the Oslo project decided to pin repos based on the
# commit hash instead of the version tag to prevent arbitrary
# code from running in developers' machines. To update to a
# newer version, run `pre-commit autoupdate` and then replace
# the newer versions with their commit hash.
default_language_version:
  python: python3
repos:
  - repo: https://github.com/pre-commit/pre-commit-hooks
    rev: 9136088a246768144165fcc3ecc3d31bb686920a  # v3.3.0
    hooks:
      - id: trailing-whitespace
      # Replaces or checks mixed line ending
      - id: mixed-line-ending
        args: ['--fix', 'lf']
        exclude: '.*\.(svg)$'
      # Forbid files which have a UTF-8 byte-order marker
      - id: check-byte-order-marker
      # Checks that non-binary executables have a proper shebang
      - id: check-executables-have-shebangs
      # Check for files that contain merge conflict strings.
      - id: check-merge-conflict
      # Check for debugger imports and py37+ breakpoint()
      # calls in python source
      - id: debug-statements
      - id: check-yaml
        files: .*\.(yaml|yml)$
  - repo: local
    hooks:
      - id: flake8
        name: flake8
        additional_dependencies:
          - hacking>=4.0.0,<4.1.0
        language: python
        entry: flake8
        files: '^.*\.py$'
        exclude: '^(doc|releasenotes|tools)/.*$'

pbr-5.11.1/.stestr.conf

[DEFAULT]
test_path=./pbr/tests
top_dir=./

pbr-5.11.1/.zuul.yaml

- job:
    name: pbr-installation-openstack-base
    timeout: 5400
    description: |
      Base job for pbr jobs that install openstack packages with current
      pbr. This ensures we don't break our ability to install openstack.
required-projects: # TODO update this list with current active python projects - openstack/pbr - openstack/tripleo-ci - openstack/aodh - openstack/automaton - openstack/ceilometer - openstack/ceilometermiddleware - openstack/cinder - openstack/cliff - openstack/debtcollector - openstack/dib-utils - openstack/diskimage-builder - openstack/futurist - openstack/glance - openstack/glance_store - openstack/heat - openstack/heat-cfntools - openstack/heat-templates - openstack/horizon - openstack/ironic - openstack/ironic-lib - openstack/ironic-python-agent - openstack/keystone - openstack/keystoneauth - openstack/keystonemiddleware - openstack/manila - openstack/manila-ui - openstack/neutron - openstack/neutron-vpnaas - openstack/nova - openstack/octavia - openstack/os-apply-config - openstack/os-brick - openstack/os-client-config - openstack/os-collect-config - openstack/os-net-config - openstack/os-refresh-config - openstack/osc-lib - openstack/oslo.cache - openstack/oslo.concurrency - openstack/oslo.config - openstack/oslo.context - openstack/oslo.db - openstack/oslo.i18n - openstack/oslo.log - openstack/oslo.messaging - openstack/oslo.middleware - openstack/oslo.policy - openstack/oslo.reports - openstack/oslo.rootwrap - openstack/oslo.serialization - openstack/oslo.service - openstack/oslo.utils - openstack/oslo.versionedobjects - openstack/oslo.vmware - openstack/pycadf - openstack/python-cinderclient - openstack/python-glanceclient - openstack/python-heatclient - openstack/python-ironicclient - openstack/python-keystoneclient - openstack/python-manilaclient - openstack/python-neutronclient - openstack/python-novaclient - openstack/python-openstackclient - openstack/python-saharaclient - openstack/python-swiftclient - openstack/python-troveclient - openstack/python-zaqarclient - openstack/requirements - openstack/sahara - openstack/sahara-dashboard - openstack/stevedore - openstack/swift - openstack/taskflow - openstack/tempest - openstack/tooz - openstack/tripleo-heat-templates - openstack/tripleo-image-elements - openstack/trove - openstack/trove-dashboard - openstack/zaqar - job: name: pbr-installation-openstack parent: pbr-installation-openstack-base pre-run: playbooks/pbr-installation-openstack/pre.yaml run: playbooks/pbr-installation-openstack/run.yaml vars: pbr_pip_version: '' - job: name: pbr-installation-openstack-pip-dev description: | This job runs the pbr installations with pip trunk. parent: pbr-installation-openstack vars: pbr_pip_version: 'git+https://github.com/pypa/pip.git#egg=pip' - job: name: pbr-installation-openstack-focal parent: pbr-installation-openstack nodeset: ubuntu-focal description: | Run pbr integration testing on older Ubuntu which allows us to test compatibility with older versions of pip and setuptools. 
- project: templates: - lib-forward-testing - lib-forward-testing-python3 - periodic-stable-jobs - publish-openstack-docs-pti check: jobs: - openstack-tox-pep8 - openstack-tox-cover - openstack-tox-py27 - openstack-tox-py36 - openstack-tox-py37 - openstack-tox-py38 - openstack-tox-py39 - pbr-installation-openstack - pbr-installation-openstack-focal - pbr-installation-openstack-pip-dev - tempest-full: override-checkout: stable/train gate: jobs: - openstack-tox-pep8 - openstack-tox-cover - openstack-tox-py27 - openstack-tox-py36 - openstack-tox-py37 - openstack-tox-py38 - openstack-tox-py39 - pbr-installation-openstack - pbr-installation-openstack-focal - pbr-installation-openstack-pip-dev - tempest-full: override-checkout: stable/train periodic: jobs: - pbr-installation-openstack - pbr-installation-openstack-pip-dev - tempest-full: override-checkout: stable/train ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1673431461.0 pbr-5.11.1/AUTHORS0000664000175000017500000001351700000000000013464 0ustar00zuulzuul00000000000000A40351 Akihiro Motoki Alex Gaynor Alexander Makarov Alfredo Moralejo Andreas Jaeger Andreas Jaeger Andrew Bogott Angus Salkeld Anthony Young Antoine Musso Attila Fazekas Ben Greiner Ben Nemec Bhuvan Arumugam Brandon LeBlanc Brant Knudson Brian Waldon Cao Xuan Hoang Chang Bo Guo ChangBo Guo(gcb) Chris Dent Chris Dohmen Christian Berendt Chuck Short Clark Boylan Claudiu Popa Corey Bryant Dan Prince Daniel Bengtsson Darragh Bailey Davanum Srinivas Dave Walker (Daviey) David Ripton David Stanek Dennis Verspuij Devananda van der Veen Dirk Mueller Doug Hellmann Doug Hellmann Doug Hellmann Dougal Matthews Elena Ezhova Eoghan Glynn Eric Windisch Erik M. Bray Eugene Kirpichov Florian Wilhelm Gaetan Semet Gage Hugo Gary Kotton Giampaolo Lauria Hervé Beraud Ian Cordasco Ian Wienand Ian Y. Choi Ionuț Arțăriși James E. Blair James Polley Jason Kölker Jason R. 
Coombs Jay Pipes Jeremy Stanley Jiri Podivin Joe D'Andrea Joe Gordon Joe Gordon Joe Heck Johannes Erdfelt Joshua Harlow Joshua Harlow Joshua Harlow Julien Danjou Kevin McCarthy Khai Do Laurence Miao Lucian Petrut Luo Gangyi Marc Abramowitz Mark McLoughlin Mark Sienkiewicz Martin Domke Maru Newby Masaki Matsushita Matt Riedemann Matthew Montgomery Matthew Treinish Matthew Treinish Mehdi Abaakouk Michael Basnight Michael Still Mike Heald Moises Guimaraes de Medeiros Monty Taylor Nikhil Manchanda Octavian Ciuhandu Ondřej Nový Paul Belanger Rajaram Mallya Rajath Agasthya Ralf Haferkamp Randall Nortman Rick Harris Robert Collins Robert Myers Roger Luethi Ronald Bradford Ruby Loo Russell Bryant Ryan Bourgeois Ryan Petrello Sachi King Sascha Peilicke Sean Dague Sean Dague Sean McGinnis Sergey Lukjanov Slawek Kaplonski Sorin Sbarnea Stephen Finucane Stephen Finucane Steve Kowalik Steve Martinelli Steven Hardy Thomas Bechtold Thomas Goirand Thomas Grainger Thomas Herve Thomas Leaman Thomas Morin Tim Burke Tim Simpson Timothy Chavez Toilal Vasudev Kamath Vincent Untz Vishvananda Ishaya Wei Tie Will Szumski YAMAMOTO Takashi Yaguang Tang Yuriy Taraday Zhongyue Luo alexpilotti cbjchen@cn.ibm.com dineshbhor jiansong lifeless ljhuang manchandavishal melanie witt melissaml nizam qingszhao weiweigu wu.shiming xuanyandong yangyawei zhangyangyang zhangyanxian ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1673431436.0 pbr-5.11.1/CONTRIBUTING.rst0000664000175000017500000000137700000000000015056 0ustar00zuulzuul00000000000000If you would like to contribute to the development of OpenStack, you must follow the steps in this page: https://docs.opendev.org/opendev/infra-manual/latest/developers.html Once those steps have been completed, changes to OpenStack should be submitted for review via the Gerrit tool, following the workflow documented at: https://docs.opendev.org/opendev/infra-manual/latest/developers.html#development-workflow Release notes are managed through the tool `reno `_. This tool will create a new file under the directory ``releasenotes`` that should be checked in with the code changes. Pull requests submitted through GitHub will be ignored. 
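For the reno workflow described above, a typical invocation looks like the
following (an illustrative sketch only; the note slug is an arbitrary
example)::

    reno new describe-your-change

This creates a new YAML stub under ``releasenotes/notes/`` which you then
edit and commit together with your code change.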
Bugs should be filed on Launchpad, not GitHub: https://bugs.launchpad.net/pbr ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1673431461.0 pbr-5.11.1/ChangeLog0000664000175000017500000010234100000000000014160 0ustar00zuulzuul00000000000000CHANGES ======= 5.11.1 ------ * Run PBR integration on Ubuntu Focal too * Remove numpy dependencies * Tie recursion calls to Dist object, not module * Update tox.ini to work with tox 4 5.11.0 ------ * Fix symbol identification in multiline message * Replace deprecated readfp method with read\_file 5.10.0 ------ * Specify Changelog procedure * Allow leading spaces when determining symbols * Use stdlib importlib.metadata where possible * Adding python classifiers py38 & py39 5.9.0 ----- * Future-proofing pyproject.toml * Use importlib-metadata for runtime package version lookups * Drop wheel from pyproject.toml examples * Changed minversion in tox to 3.18.0 5.8.1 ----- * Add release note about missing pbr.json fix * Avoid recursive calls into SetupTools entrypoint * setup.cfg: Replace dashes with underscores * remove explicit mock * Don't test with setuptools local distutils * Use context blocks for open() calls in packaging 5.8.0 ----- * Add python2 testing back to PBR * Allow PEP517 without setup\_requires * Clarify the need for setup.py with PEP517 5.7.0 ----- * Add a PEP517 interface * PBR package testing improvements * Run python3.9 test jobs * Retire django-openstack-auth * Upgrade the pre-commit-hooks version * Correct comment for 'D1\_D2\_SETUP\_ARGS' 5.6.0 ----- * Reverse ordering of 'D1\_D2\_SETUP\_ARGS' * Add test for cfg -> py transformation * Don't pass empty 'long\_description' * Move flake8 as a pre-commit local target * Map requires-python to python-requires (attempt 2) * Update dependencies to avoid failure with old pip * Increase OS\_TEST\_TIMEOUT to 1200 * Prevent test failure due to use of setpref * util: Convert 'D1\_D2\_SETUP\_ARGS' to a list of tuples * Revert "Map requires-python to python-requires" * Dropping lower constraints testing * Adding pre-commit 5.5.1 ----- * Run tempest-full for stable/train * Remove use\_2to3 backward compat for Setuptools * More easy\_install.ScriptWriter.get\_header() 5.5.0 ----- * Remove bdist\_wininst support * Increase integration test timeout * Add Release Notes to documentation * Cleanup old legacy devstack-gate jobs * Begin work to modernize pbr's integration testing * Re-add ChangeLog * Update some url to use opendev.org * Support newer openstackdocstheme * Use easy\_install.ScriptWriter.get\_header() * Remove neutron-fwaas from the jobs' required project * Update python requires packaging metadata for package * trivial: Improve logging of run commands * Map requires-python to python-requires * Update hacking * Add support for virtualenv 20.x 5.4.5 ----- * Switch to Ussuri jobs 5.4.4 ----- * trivial: Use 'open' context manager * Update to latest hacking * Mark strings as raw 5.4.3 ----- 5.4.2 ----- * Add Python 3 Train unit tests 5.4.1 ----- * Fix parsing on egg names with dashes from git URLs 5.4.0 ----- * option to print only the version of a package 5.3.1 ----- * Resolve some issue with tox.ini, setup.cfg 5.3.0 ----- * Stop using pbr sphinx integration * Switch to release.o.o for constraints * Make WSGI tests listen on localhost * Fix Windows support * Allow git-tags to be SemVer compliant * Read description file as utf-8 5.2.1 ----- * Add openstack-tox-py37 job * Set subparser argument required * Remove neutron-lbaas * Install more dependencies 
for integration testing * Use more verbose logging for test failure * Update Sphinx requirement * Fix white space handling in file names 5.2.0 ----- * OpenDev Migration Patch * Typo fix: s/extract\_mesages/extract\_messages/ * Support provides\_extra metadata * Replace openstack.org git:// URLs with https:// * Fix nits and typos on release note message * Remove libzmq-dev from integration.sh package install * Fix error when keywords are defined as a list in cfg 5.1.3 ----- * Resolve \`\`ValueError\`\` when mapping value contains a literal \`\`=\`\` * Change openstack-dev to openstack-discuss 5.1.2 ----- * Ignore --find-links in requirements file * Do not globally replace path prefix * Change openstack-dev to openstack-discuss 5.1.1 ----- * Fix incorrect use of flake8:noqa * Correct documentation hyperlink for environment-markers 5.1.0 ----- 5.0.0 ----- * Use templates for cover and lower-constraints * Special case long\_description\_content\_type * tox: Suppress output * Support wheel 0.32.0+ 4.3.0 ----- * Remove my\_ip from generated wsgi script * Add an option to skip generating RELEASENOTES.rst * docs: Add docs for reno integration * Skip test for testr hook being installed when testr is not available * Fix typo in contribution instructions * Add release note for fix to bug 1786306 * Move pbr-installation jobs in-tree * Support subdirectory in the url * remove pypy jobs * add lib-forward-testing-python3 test job * add python 3.6 unit test job * switch documentation job to new PTI * import zuul job settings from project-config * Ignore Zuul when generating AUTHORS * packaging: Remove support for pyN requirement files * tox: Re-add cover target 4.2.0 ----- * Deprecate 'test' integration * Deprecate 'build\_sphinx' integration * Add Sphinx extension * doc: Add documentation for missing '[pbr]' options * doc: Fix formatting of packagers guide * Switch to stestr 4.1.1 ----- * trivial: Fix file permissions * trivial: Remove 'tools/releasenotes\_tox.sh' * Add docstring from LocalManifestMaker.add\_defaults 4.1.0 ----- * Update python3 versions in tox.ini envlist * fix tox python3 overrides * Support python-requires metadata 4.0.4 ----- * Revert "Remove win32/nt checks for wrapper script gen" * Add leading 0 on alpha release in semver doc 4.0.3 ----- * Don't poke in pip for requests * Fix builddoc with sphinx <= 1.6 4.0.2 ----- 4.0.1 ----- * add lower-constraints job * Explicitly read setup.cfg as utf-8 on Python 3 4.0.0 ----- * builddoc: Treat '[pbr] autodoc\_tree\_excludes' as a multi-line opt * update parse test to use reliable comparison * Better Sem-Ver header handling * Make docs on env vars a little clearer * Updated from global requirements * Updated from global requirements * future-proof invocation of apidoc * emit warning correctly * Updated from global requirements * deprecations: Deprecate support for '-py{N}' requirements * doc: Minor rework of usage doc * doc: Rework features doc * Support v version * Deprecate testr and nose integration * tests: Increase coverage of requirements parsing * trivial: Move packaging tests to test\_packaging * Put test-requirements into an extra named 'test' * Support Description-Content-Type metadata * Avoid tox\_install.sh for constraints support * Test on Python 3.6 * Support PEP 345 Project-URL metadata * Remove setting of version/release from releasenotes * Updated from global requirements * Use 'build\_reno' setuptools extension if available * Remove unnecessary 'if True' * Discover Distribution through the class hierarchy * Add reno for 
release notes management * Remove support for command hooks * Remove dead code * Deprecate support for Sphinx < 1.6 * builddoc: Use '[sphinx\_build] builders' with Sphinx < 1.6 * Remove win32/nt checks for wrapper script gen * Updated from global requirements * Remove py26 support * Updated from global requirements * Updated from global requirements * Updated from global requirements * Update URLs in documents according to document migration * Updated from global requirements * gitignore: Ignore .venv * switch from oslosphinx to openstackdocstheme * Trivial: Fix docstring * turn on warning-as-error flag for doc build * rearrange existing documentation using the new standard layout 3.1.1 ----- * Restore previous IP binding * docs: Don't specify pbr version in 'setup.py' 3.1.0 ----- * allow user to override the output location of api docs * fix tests based on API change in Sphinx * Updated from global requirements * Add binding option for WSGI server * Ignore index URL lines in requirements.txt files 3.0.1 ----- * builddoc: uses the new Sphinx 1.6 code for multiple builder * Updated from global requirements * Fix missing comment from previous change * trivial: Add note about multiple builders support 3.0.0 ----- * Remove 'build\_sphinx\_latex' * Stop building man pages by default * docs: Use definition lists * add image.nonlocal\_uri to the list of warnings ignored * doc: Document Sphinx integration * add changelog to published documentation 2.1.0 ----- * Lazy import pkg\_resources * Add Changelog build handling for invalid chars * Initialize sphinx config using application instead of config * Make oslosphinx optional * Updated from global requirements * Also generate XML coverage report * Update to a newer hacking library 2.0.0 ----- * tox: Don't set skipsdist=True * Stop using 'warnerrors' * doc: Clarify sections in 'setup.cfg' * Remove testr entry point * Updated from global requirements * Remove discover from test-requirements * Add Constraints support * Don't raise exception on missing man pages * Updated from global requirements * Clean imports in code * Updated from global requirements * Docstrings should not start with a space * Changed the home-page link * Update .coveragerc after the removal of openstack directory * coverage package name option, doc improvement * Updated from global requirements * Deprecated warning for SafeConfigParser * Add more words to a confusing error message * Don't ignore data-files * Change assertTrue(isinstance()) by optimal assert * Fix handling of old git log output * Fix typo in the index.rst * Expose deb version to match exposing rpm version * Replace OpenStack LLC with OpenStack Foundation * Updated from global requirements * Fix pypy soabi tests * Add Python 3.5 classifier and venv * Fix argument order for assertEqual to (expected, observed) * Move to oslosphinx * Updated from global requirements * Restore warnerrors behavior and support Sphinx 1.4 * Updated from global requirements * Updated from global requirements * Updated from global requirements * Fix pypy wsgi tests * Remember the insertion order for pbr.json 1.10.0 ------ * File is wrongly marked as executable * Fix wsgiref script use with oslo.config * Update Preversioning explanation to avoid double that 1.9.1 ----- * Handle IndexError during version string parsing * Correct server test 1.9.0 ----- * Updated from global requirements * Fix soabi tests with pypy * package: fix wrong catch in email parsing * Sort 'pbr freeze' output * Do not convert git tags when searching history * Include 
wsgi\_scripts in generated wheels * Correct the indentation in the classifiers example * Do not silently drop markers that fail to evaluate * Clarifications around tags and version numbers * Correct typo - s/enabeld/enabled/ * Use apt-cache generated packages to provide build deps * fix some variable names * Don't attempt to test with 0.6c11 with Py3 * Support entry point patching on setuptools < 12 * Updated from global requirements * Split changelog on nulls instead of ( * Add libjpeg and liberasurecode for tests * Handle markers to support sdist on pip < 6 * Deprecated tox -downloadcache option removed * passenv integration environment variables re-enabling integration tests * Enable pep8 H405 tests * Add patch to properly get all commands from dist * doc: Remove 'MANIFEST.in' * doc: Trivial cleanup of 'index.rst' * doc: Add deprecation note for 'requirements-pyN' * doc: Restructure 'Requirements' section * doc: Restructure 'Usage' section * doc: Add details of manifest generation * Support git://, git+ssh://, git+https:// without -e flag * More support Sphinx >=1.3b1 and <1.3.1 * Fix docs for markers * Do not error when running pep8 with py3 * Ensure changelog log output is written if it already exists * Cleanup jeepyb and pypi-mirror special casing 1.8.1 ----- * Handle the case where cmd.distribution has no pbr attribute * test\_integration cleanups * Remove redundant testscenarios glue * Add standard code coverage configuration file * Add shields.io version/downloads links/badges into README.rst 1.8.0 ----- * Strip comments present in setup.cfg * Protect WSGI application with a critical section 1.7.0 ----- * Have pbr egg\_info.writer check PBR is enabled 1.6.0 ----- * Strip comments in requirements files * Support Sphinx >=1.3 new protoype and warnings 1.5.0 ----- * Handle git being entirely absent * We require the use of setuptools * Fix retrieval of commit data and most recent tag 1.4.0 ----- * Fix docs for environment markers in extras * Export ChangeLog and AUTHORS in install * Updated from global requirements * Updated from global requirements * Show how long the git querying takes * Add wsgi\_scripts support to PBR * Updated from global requirements 1.3.0 ----- * Wrap env markers in parens * Updated from global requirements * Add more documentation around building docs * Expose a 'rpm\_version' extra command * Updated from global requirements * Use string for 'OS\_TEST\_TIMEOUT' default * Updated from global requirements * This documents the use of comments that work between 2.6, 2.7 and 3.4 1.2.0 ----- * Strip markers from test requirements * Add build\_sphinx test coverage 1.1.1 ----- * Revert "Remove sphinx\_config.init\_values() manual call" 1.1.0 ----- * Fix test case to be runnable with gnupg 2.1 * More explicit data\_files install location docs * Move CapturedSubprocess fixture to base * Remove sphinx\_config.init\_values() manual call * Updated from global requirements * builddoc: allow to use fnmatch-style exclusion for autodoc * doc: add some basic doc about pbr doc options * Add home-page into sample setup.cfg * Make setup.py --help-commands work without testrepository 1.0.1 ----- * Remove self.pre\_run calls in packaging.py * Add kerberos deps to build the kerberos wheel * Update hacking to 0.10.x series 1.0.0 ----- * Teach pbr to read extras and env markers * Bump integration test timeouts * Finish removing invocations of pip * Advertise support for Python3.4 * Issue #1451976: handle commits with non-utf8 text * Parallelise integration tests 0.11.0 ------ * Issue 
#1450210: Preversioning rc tag handling * Stop testing setup.py easy\_install behaviour * Test pip install -e of projects * Build all the wheels in one pass rather than many * Improve integration.sh * Stop re-invoking pip * Honour new test variable PIPVERSION * Be safe, don't let friends use RawConfigParser * Revert "Support platform-specific requirements files" * Reinstate pure-git-hashes-are-illegal test * Ignore non-release related tags * Test that MANIFEST.in excludes work * Fixup semver * Better diagnostics on test failure * Don't avoid egg-info quite so often * Add truncated lincense text * Allow overwriting sphinx builder from command line * "packages" might list multiple directories * Support script text override for newer develop * Fixes Windows generated scripts headers * Update README format to match release notes tool * Be more aggressive about building wheels * Use a wheelhouse in the integration test * Build a wheel when integration testing PBR changes * Use unsafe OpenPGP keys for testing 0.10.7 ------ * \_get\_revno only returns a commit count * Move write\_pbr\_json to avoid issues with nose * Properly check for git before getting git dir * Port in git sha changes from 0.10 line 0.10.6 ------ * Integration test PBR commits 0.10.5 ------ * Move write\_pbr\_json to avoid issues with nose 0.10.4 ------ * Properly check for git before getting git dir * Use post version signifiers * Only import sphinx during hook processing 0.10.3 ------ * Stop including git sha in version strings * Write and read more complex git sha info 0.10.1 ------ * Prefix git suffixes with + instead of * Workflow documentation is now in infra-manual * Packagers vs package maintainers * Support platform-specific requirements files * Remove extraneous vim editor configuration comments * Clean up links and docs * Add user.name to git config in tests * Honour validly tagged versions * autodoc: allow to exclude modules from auto-generation * tests: factorize has\_opt handling * Support reading versions from wheels * Only consider tags that look like versions * Refactor oneline changelog walking * Stop using intersphinx * Retry the integration setup on connection error * Fixes a spelling error * Adds option for excluding files from autodoc trees * Allow for inclusion of authors and changelog when building docs * Work toward Python 3.4 support and testing * Accept capitalized Sem-Ver headers * use get\_boolean\_option for warnerrors * Handle more legacy version numbers * Look for and process sem-ver pseudo headers in git * Raise an error if preversion versions are too low * Teach pbr about post versioned dev versions * Handle more local dev version cases * Introduce a SemanticVersion object * cleanup tox.ini * add docs env to tox * Bump hacking to 0.9.x series 0.10.2 ------ * Remove all 2.7 filtering * Stop filtering out argparse * Remove mirror testing from the integration script 0.9.0 ----- * Allow examining parsing exceptions * Update integration script for Apache 2.4 * Restore Monkeypatched Distribution Instance * Register testr as a distutil entry point * Check for git before querying it for a version * Allow \_run\_cmd to run commands in any directory * Make setUp fail if sdist fails * Permit pre-release versions with git metadata * Un-nest some sections of code 0.8.2 ----- * Remove --use-mailmap as it's not needed * Fix typos in docs 0.8.1 ----- * pbr/testr\_command.py: Add logging * Documentation updates * Fixed a typo in the documentation * Make source configurable when using --coverage * README.rst: 
tweaks * Format autoindex.rst file properly * make pbr use hacking directly 0.8.0 ----- * Use unicode\_literals import instead of u'unicode' notation * Add support for nested requirements files * Remove pip version specifier * Make tools/integration.sh take a branch * Fixes blocking issue on Windows * Remove duplicate git setup in tests 0.7.0 ----- * Factor run\_cmd out of the base class * Return the real class in VersionInfo \_\_repr\_\_ * Fix up some docstrings * Init sphinx config values before accessing them * Remove copyright from empty files * Declare support for Python versions in setup.cfg * Updated from global requirements * Remove unused \_parse\_mailmap() * Add support for python 3-<3.3 * Remove tox locale overrides * Do not force log verbosity level to info 0.6 --- * package: read a specific Python version requirement file * Fix typos in documents * Pin sphinx to <1.2 * Build mirror using proper requirements * Use wheels for installation * show diff between pip installed packages and requirements * Enable wheel processing in the tests * Clean up integration script * Let git do mailmap mapping for us * Make ChangeLog more condensed * Make sphinx builders configurable in LocalBuildDoc * Serve local mirror using apache * Bump the development status classifier * Support building wheels (PEP-427) * Ignore jenkins@openstack.org in authors building * Communicate to user when we skip a requirement * Base python 2.7 skip list on parsed names * Link to the documentation in the readme * Avoid using 'which' executable for finding git * Add wheel mirror structure awareness * Remove pip as an argument from mkvenv * Use pypi-mirror instead of jeepyb 0.5.23 ------ * Fixed pbr install to not error out and fail if git is not installed 0.5.22 ------ * Move base test case logic out of \_\_init\_\_.py * Comply with pep440 * Prevent ordereddict installation on 2.7 * Do not pass unicode where byte strings are wanted * Add the semver documentation * Get rid of PyPI URL override in integration test * Add a test for command registration * Switch away from tearDown for BaseTests * Fix test\_changelog when git isn't globally setup * Rework run\_shell\_command * Update tox config to use latest upgrades * Add a hint for users who don't have git installed * Add pypy to tox.ini * Fix python-ldap mirroring * Stop checking periods in commit messages * Fixes issue with command escaping on Windows * Added documentation for packagers * Update requirements in integration test * Sync requirements with global requirements * Fix pep8 error * Add option to run testr serially 0.5.21 ------ * Replace setuptools\_git with a smarter approach * Clean up some style warnings * Consume the OpenStack mirror by default * Correct typo * Improve AUTHORS file generation * Ensure \_run\_shell\_command always returns same type * Hierarchical sphinx API documentation generation * Add support for classmethod console scripts * Add skip\_pip\_install to setup.cfg * Remove missing\_reuqires optimization * Use the same logic everywhere for requirements files 0.5.20 ------ * Fix .mailmap file search location * Swap the order of configparser imports * Install jeepyb and requirements from REPODIR * Stop trying to install old pip and setuptools * Add support for globbing in data files * Revert include\_package\_data change * Also patch easy\_install script creation * Loop over test output for better readability * Add more documentation * We force installs via pip, we should declare it * Fix python 3.3 tests * Replace entry\_points 
console\_scripts * Fix integer\_types type under python 3 * Remove the need to specify the pbr hook * Move d2to1 more into the source tree * Set defaults directly in option processing * Do not assume the tests run as jenkins * Add libffi-dev * Ignore project creator while generating AUTHORS 0.5.19 ------ * Add Python 3.3 checking * Fix some issues in setup.py test * Put AUTHORS.in at the top of AUTHORS * Support version override with PBR prefix * Don't try to install requirements repo * Allow pip installation to be skipped * Don't run pip needlessly * Clean up hacking and path issues with d2to1 * Support version override with PBR prefix * Fix some issues in setup.py test * Add support for namespace packages 0.5.18 ------ * testr: fix Python 3 compatibility * Allow to override requirement files via environ * Add importlib to the filter list * Fix integration script * Move testrepository to test-requirements.txt * Make python setup.py test do the right thing * Add an integration test script 0.5.17 ------ * Fix pip invocation for old versions of pip 0.5.16 ------ * Remove explicit depend on distribute 0.5.15 ------ * Use pip instead of easy\_install for installation * Skip git-checkout related tests when .git is missing * Refactor hooks file 0.5.14 ------ * Remove jinja pin 0.5.12 ------ * Explicitly install install\_requires * More python3 compatibility * python3: Use six everywhere * Add vim and emacs files to .gitignore * Fix sphinx autodoc module headers * Handle versioned egg fragments in dependency URLs * Fix tox.ini to not reference deleted testenv 0.5.11 ------ * Fix author and changelog skipping * Fix doc autoindexing * Changed \_get\_git\_directory() to use git itself * python3: Python3.x compatibility * Cosmetic doc fixes * Make parse errors meaningful * Add missing files to the tarball 0.5.10 ------ * Default to single-version-externally-managed * Add config for skipping authors and changelog * Update to d2to1 0.2.10 * Added some additional zest.releaser hooks to deal with some perpetual annoyances of doing releases * When catching exceptions from a setup-hook don't throw a traceback up for sys.exit() 0.5.8 ----- * Don't try to smart find packages that aren't there * Add support for manpages * Support forcing single-version-externally-managed * Add more iterations to BuildDoc coverage * Align Sphinx autodoc options with oslo 0.5.6 ----- * Using \_\_file\_\_ was getting the wrong location * Add support for configuring pbr sphinx options * Add support for warnings=errors * Fix authors generation from co-authored-by * Ported in Co-authored-by support from oslo * Fix up the test suite to actually all pass * Added version code * Update tox to run hacking * Make hacking compliant * Ensure that reuqirements files are shipped * Add MANIFEST.in 0.5.0 ----- * Update documentation for release * Rename back to PBR * Remove extra complexity of six.u wrapper * Add contributing file * Fix .gitreview file * Add docs dir * Cleaned up stdout capture invocation * Put stdout capture in the base test class * Amended README testing instructions * Update README with test running instructions * Move setup\_requires to setup\_requires\_dist * Move sphinx to test-reuqirements * Deal with Sphinx import ordering * Add a better todo comment * Update docs * Just use d2to1 via hooks mechanism * Add a few more backwards compatibility options * Fix exception handling error * Lower distribute requirement - it's too extreme * Remove sex requirement, since it's in extern * Remove version that had been used for 
testing * pep8/pyflakes fixes * Fix things up to work with nova * Split out oslo.packaging * Fix two minor style things * Fixes #20. Make sure the manifest\_maker monkeypatch only occurs once and that it gets all its required locals encapsulated. This is why I hate any sort of monkey-patching, but it appears to be the only way to get this feature working * Fix 'except as' statements that were breaking things in Python 2.5 * Somehow neglected to include extra-file.txt used in the test for #15. Also moved the info log message that was causing othere tests to fail to only appear when commands are run that actually affect the manifest generation * Yet another Python 2.5 fix; I really ought to just set up my own 2.5 installation * multiprocessing doesn't exist in Python 2.5 * Adds six.py and makes the necessary tweaks to improve support for using d2to1 natively across Python versions * Fix 'except as' statements that were breaking things in Python 2.5 * Somehow neglected to include extra-file.txt used in the test for #15. Also moved the info log message that was causing othere tests to fail to only appear when commands are run that actually affect the manifest generation * Adds support for the tests\_require keyword from setuptools. Tested by eating our own dogfood -- ./setup.py test now works for d2to1's own setup * support test suite and test tests-require * Patch manifest\_maker to support extra\_files--this is really the only way I can see to do this at all sensibly * This is a test that should really work to test this feature * openstack.common.setup: fails to get version from git * Use oslo-config-2013.1b3 * Change the check for the existence of .git directory * returncode for Popen is valid only after communicate call * sort options to make --help output prettier * Allow running test in uninstalled source tree * Remove openstack.common.db.common * Add missing DBDuplicateEntry * Import sqlalchemy session/models/utils * Implements import\_group * Fix Pep8 Warning * Fixes "is not", "not in" syntax usage * setup: count revs for revno if there are no tags * Use testtools as test base class * Move logging config options into the log module * Use revno and git sha for pre-release versioning * Add env var version override for packagers * trivial pep whitespace fix * Remove write\_requirements * Rolling back to previous version of resolve\_name; it's possible this can fail in the presence of a broken namespace package, but I want to make sure that's the case before trying to fix it * Stop using no longer valid -E option for pip * oops - now compatible with python 3 * use regexp to parse the mailmap * Make tox run doctests * Verbose should not enable debug level logging * Fix pep8 E125 errors * Simplify version processing * Any exception that escapes from a hook gets reported along with a stack trace * Revert "Support lookup of value using "group.key"" * remove debugging * Add a latex command to our sphinx builders * Support lookup of value using "group.key" * debugging auto builds * debugging auto builds * fix up importer; clean hook name before search * show traceback when a hook encounters an exception * Add deprecated --logdir common opt * Add deprecated --logfile common opt * Allow nova and others to override some logging defaults * Fixing the trim for ListOp when reading from config file * Fix set\_default() with boolean CLI options * Rename utils.py to strutils.py * Improve cfg's argparse sub-parsers support * Make project pyflakes clean * Fix regression with cfg CLI arguments * Fix ListOpt 
to trim whitespace * Hide the GroupAttr conf and group attributes * Fix broken --help with CommonConfigOpts * updating sphinx documentation * Don't reference argparse.\_StoreAction * Fix minor coding style issue * Remove ConfigCliParser class * Add support for positional arguments * Use stock argparse behaviour for optional args * Use stock argparse --usage behaviour * Use stock argparse --version behaviour * Remove add\_option() method * Completely remove cfg's disable\_interspersed\_args() * argparse support for cfg * Remove openstack.common.config and extensions * Fixes setup compatibility issue on Windows * Move utils.execute to its own module * Add a missing comma in a docstring * Import order clean-up * Fix a logic error in stable version calculation * cfg: fix required if option has a dash * Fix a couple of file handle leaks, using with statements * Ignore the stackforge jenkins email address * Extracted parse\_host\_port into network\_utils * Add the rpc service and delete manager * Some changes I made weeks ago but forgot to commit * Added a method for parsing host:port pairs * Add basic periodic task infrastructure * Move manager.py and service.py into common * cfg: clean up None value handling * Allow set\_default and set\_override to use None * Tilde expansion for --config-file and --config-dir * Add multiple-driver support to the notifier api * Fix errors reported by pyflakes * Add import\_opt() method to ConfigOpts * Remove unused imports * Modifies \_is\_opt\_registered fcn to check for duplicate opts * fix bug lp:1019348,update openstack-common to support pep8 1.3 * cfg: allow empty config values * Add SKIP\_GENERATE\_AUTHORS option to setup.py * Add SKIP\_WRITE\_GIT\_CHANGELOG option to setup.py * Fix missing gettextutils in several modules * Fix up pre-versioning based on testing in glance * cfg: Fix typo in documentation * Fix mailmap * Split read\_versioninfo into a method * Add common logging and notification * Add support for tag based versioning * Skip argparse when injecting requirements * Update common code to support pep 1.3. 
bug 1014216 * Add autodoc generation to the build\_sphinx command * Use 'is not None' instead of '!= None' * Adds support for bol and eol spaces to ini files * Add support to include config aliases * Create testutils with skip decorators * cfg: add a global CONF object * cfg: add generators for iterating over all options * cfg: move constructor args to \_\_call\_\_() args * Added support for proper bare URLs * Backslash continuation removal (common folsom-1) * Alphabetize imports in openstack/common/cfg.py * cfg: make reset() clear defaults and overrides * cfg: automatically create option groups * cfg: allow options to be marked as required * cfg: use a list comprehension instead of map() * Encapsulate common sdist actions into a cmdclass * Truly handle mailmap entries for all combinations * New ConfigOpts.find\_file() for locating conf files * Handle authors existed before VCS was around * Support for directory source of config files * Provide file extension when when looking for files * Some refactoring of the cfg cache * Add caching to openstack.common.cfg * Add AUTHORS generation function * Change behavior in utils.import\_object() * Move auth\_str\_equal() to a new authutils module * Create openstack.common.timeutils * Typofix, OptionGroup should be OptGroup * Use absolute import for iniparser * Make 'yes' also a true boolean * Finish implementing MultiStrOpt * Avoid leaking secrets into config logging * Add auth\_str\_equal() to common utils * Fix bug 954488 * fix restructuredtext formatting in docstrings * Add ConfigOpts.print\_help() * Fixes a bug where entry\_points in a setup.py are blanked out if there are no entry points defined in setup.cfg * Promote more complete support for ISO 8601 time * cfg: fix a small comment typo * Several changes that I accidentally committed to d2to1's old repository. See the changes to the changelog for more details * cfg: unneeded multiple inheritance * PEP8 cleanup (openstack-common) * Backslash continuations (misc.) * Disable ConfigParser interpolation (lp#930270) * Add git changelog method * Add git vcsversion method * Updated tox config for multi-python testing * Split functions to avoid eventlet import * Implements blueprint separate-nova-volumeapi * Makes common/cfg.py raise AttributeError * PEP8 type comparison cleanup * Add the Mapping interface to cfg.ConfigOpts * Add support to cfg for disabling interspersed args * Add new cfg module * import should not return an instance of the class * use isinstance and types.\* * make fix\_path a real function so it can be mock'd * remove unused imports * merge in upstream * port execute and utcnow functions from nova * pull parse\_mailmap str\_dict\_replace from nova * reog from import merge * Rajaram/Vinkesh|Default xmlns for extension serialization can be overriden, added default factory for extension middleware * Rajaram/Vinkesh | Copied tests for wsgi from nova. Added default content/accept types in Request which can be overridden by projects. 
Copied tests for XML serialization of Extension Controller's action from nova * Rajaram/Vinkesh | Fixed the extension bug where custom collection actions' routes in resource extension were not getting registered * Vinkesh/Rajaram|Added nova's extension framework into common and tests for it * Rajaram|renamed AppBaseException to OpenstackException and app\_config\_dir\_name to config\_dir as per jaypipes' feedback * Rajaram/Vinkesh | Removed references to Glance in code * Adding syslog support * Add some more generic middleware, request context, utils, and versioning. Add basic template for server binary * Initial skeleton project ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1673431436.0 pbr-5.11.1/LICENSE0000664000175000017500000002363700000000000013425 0ustar00zuulzuul00000000000000 Apache License Version 2.0, January 2004 http://www.apache.org/licenses/ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 1. Definitions. "License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. "Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. "Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. "You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. "Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. "Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. "Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). "Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. "Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. 
For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." "Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. 2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. 3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. 4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: (a) You must give any other recipients of the Work or Derivative Works a copy of this License; and (b) You must cause any modified files to carry prominent notices stating that You changed the files; and (c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and (d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. 
You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. 5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. 6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. 7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. 8. Limitation of Liability. In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. 9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. 
pbr-5.11.1/PKG-INFO

Metadata-Version: 2.1
Name: pbr
Version: 5.11.1
Summary: Python Build Reasonableness
Home-page: https://docs.openstack.org/pbr/latest/
Author: OpenStack
Author-email: openstack-discuss@lists.openstack.org
License: UNKNOWN
Project-URL: Bug Tracker, https://bugs.launchpad.net/pbr/
Project-URL: Documentation, https://docs.openstack.org/pbr/
Project-URL: Source Code, https://opendev.org/openstack/pbr
Description: Python Build Reasonableness
Platform: UNKNOWN
Classifier: Development Status :: 5 - Production/Stable
Classifier: Environment :: Console
Classifier: Environment :: OpenStack
Classifier: Intended Audience :: Developers
Classifier: Intended Audience :: Information Technology
Classifier: License :: OSI Approved :: Apache Software License
Classifier: Operating System :: OS Independent
Classifier: Programming Language :: Python
Classifier: Programming Language :: Python :: 2
Classifier: Programming Language :: Python :: 2.7
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.5
Classifier: Programming Language :: Python :: 3.6
Classifier: Programming Language :: Python :: 3.7
Classifier: Programming Language :: Python :: 3.8
Classifier: Programming Language :: Python :: 3.9
Requires-Python: >=2.6
Description-Content-Type: text/x-rst; charset=UTF-8

pbr-5.11.1/README.rst

Introduction
============

.. image:: https://img.shields.io/pypi/v/pbr.svg
    :target: https://pypi.python.org/pypi/pbr/
    :alt: Latest Version

.. image:: https://img.shields.io/pypi/dm/pbr.svg
    :target: https://pypi.python.org/pypi/pbr/
    :alt: Downloads

PBR is a library that injects some useful and sensible default behaviors
into your setuptools run. It started off life as the chunks of code that
were copied between all of the `OpenStack`_ projects. Around the time that
OpenStack hit 18 different projects each with at least 3 active branches,
it seemed like a good time to make that code into a proper reusable library.

PBR is only mildly configurable. The basic idea is that there's a decent
way to run things and if you do, you should reap the rewards, because then
it's simple and repeatable. If you want to do things differently, cool! But
you've already got the power of Python at your fingertips, so you don't
really need PBR.

PBR builds on top of the work that `d2to1`_ started to provide for
declarative configuration. `d2to1`_ is itself an implementation of the
ideas behind `distutils2`_. Although `distutils2`_ is now abandoned in
favor of work towards `PEP 426`_ and Metadata 2.0, declarative config is
still a great idea and specifically important in trying to distribute setup
code as a library when that library itself will alter how the setup is
processed. As Metadata 2.0 and other modern Python packaging PEPs come out,
PBR aims to support them as quickly as possible.

* License: Apache License, Version 2.0
* Documentation: https://docs.openstack.org/pbr/latest/
* Source: https://opendev.org/openstack/pbr
* Bugs: https://bugs.launchpad.net/pbr
* Release Notes: https://docs.openstack.org/pbr/latest/user/releasenotes.html
* ChangeLog: https://docs.openstack.org/pbr/latest/user/history.html
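As an illustrative sketch of that declarative configuration (the project
name and values below are placeholder examples, not part of pbr itself),
the ``setup.cfg`` that pbr reads typically looks like::

    [metadata]
    name = mypackage
    summary = One-line description of the package
    description_file = README.rst
    author = Example Author
    author_email = author@example.com
    classifiers =
        Programming Language :: Python :: 3

    [files]
    packages =
        mypackage

With this in place, the version, AUTHORS and ChangeLog are derived from git
metadata instead of being repeated in ``setup.py``.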
.. _d2to1: https://pypi.python.org/pypi/d2to1
.. _distutils2: https://pypi.python.org/pypi/Distutils2
.. _PEP 426: http://legacy.python.org/dev/peps/pep-0426/
.. _OpenStack: https://www.openstack.org/

pbr-5.11.1/doc/requirements.txt

sphinx!=1.6.6,!=1.6.7,>=1.6.2,<2.0.0;python_version=='2.7' # BSD
sphinx!=1.6.6,!=1.6.7,>=1.6.2;python_version>='3.4' # BSD
sphinxcontrib-apidoc>=0.2.0 # BSD
openstackdocstheme>=1.18.1 # Apache-2.0
reno>=2.5.0 # Apache-2.0
six==1.12.0 # MIT

pbr-5.11.1/doc/source/conf.py

# -*- coding: utf-8 -*-
# Copyright (C) 2020 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import os
import sys

sys.path.insert(0, os.path.abspath('../..'))

# -- General configuration ----------------------------------------------------

# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.todo', 'sphinxcontrib.apidoc']

# make openstackdocstheme optional to not increase the needed dependencies
try:
    import openstackdocstheme
    extensions.append('openstackdocstheme')
except ImportError:
    openstackdocstheme = None

# openstackdocstheme options
# Deprecated options for docstheme < 2.2.0, can be removed once
# pbr stops supporting py27.
repository_name = 'openstack/pbr'
bug_project = 'pbr'
bug_tag = ''
# New options with openstackdocstheme >=2.2.0
openstackdocs_repo_name = 'openstack/pbr'
openstackdocs_auto_name = False
openstackdocs_bug_project = 'pbr'
openstackdocs_bug_tag = ''

# autodoc generation is a bit aggressive and a nuisance when doing heavy
# text edit cycles.
# execute "export SPHINX_DEBUG=1" in your terminal to disable

# Add any paths that contain templates here, relative to this directory.
# templates_path = ['_templates']

# The suffix of source filenames.
source_suffix = '.rst'

# The master toctree document.
master_doc = 'index'

# General information about the project.
project = 'pbr'
copyright = '2013, OpenStack Foundation'

# If true, '()' will be appended to :func: etc. cross-reference text.
add_function_parentheses = True

# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
add_module_names = True # The name of the Pygments (syntax highlighting) style to use. pygments_style = 'sphinx' exclude_trees = [] # -- Options for HTML output -------------------------------------------------- # The theme to use for HTML and HTML Help pages. Major themes that come with # Sphinx are currently 'default' and 'sphinxdoc'. if openstackdocstheme is not None: html_theme = 'openstackdocs' else: html_theme = 'default' # Output file base name for HTML help builder. htmlhelp_basename = '%sdoc' % project # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, author, documentclass # [howto/manual]). latex_documents = [ ('index', '%s.tex' % project, '%s Documentation' % project, 'OpenStack Foundation', 'manual'), ] # -- sphinxcontrib.apidoc configuration -------------------------------------- apidoc_module_dir = '../../pbr' apidoc_output_dir = 'reference/api' apidoc_excluded_paths = [ 'tests', ] ././@PaxHeader0000000000000000000000000000003200000000000011450 xustar000000000000000026 mtime=1673431461.88996 pbr-5.11.1/doc/source/contributor/0000775000175000017500000000000000000000000017024 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1673431436.0 pbr-5.11.1/doc/source/contributor/index.rst0000664000175000017500000000301200000000000020661 0ustar00zuulzuul00000000000000============== Contributing ============== Basic Details ============= .. include:: ../../../CONTRIBUTING.rst Running the Tests for pbr ========================= The testing system is based on a combination of `tox`_ and `testr`_. The canonical approach to running tests is to simply run the command ``tox``. This will create virtual environments, populate them with dependencies and run all of the tests that OpenStack CI systems run. Behind the scenes, tox is running ``testr run --parallel``, but is set up such that you can supply any additional testr arguments that are needed to tox. For example, you can run: ``tox -- --analyze-isolation`` to cause tox to tell testr to add ``--analyze-isolation`` to its argument list. It is also possible to run the tests inside of a virtual environment you have created, or it is possible that you have all of the dependencies installed locally already. If you'd like to go this route, the requirements are listed in ``requirements.txt`` and the requirements for testing are in ``test-requirements.txt``. Installing them via pip, for instance, is simply:: pip install -r requirements.txt -r test-requirements.txt If you go this route, you can interact with the testr command directly. Running ``testr run`` will run the entire test suite. ``testr run --parallel`` will run it in parallel (this is the default incantation tox uses). More information about testr can be found at: http://wiki.openstack.org/testr .. _tox: http://tox.testrun.org/ .. _testr: https://wiki.openstack.org/wiki/Testr ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1673431436.0 pbr-5.11.1/doc/source/index.rst0000664000175000017500000000275300000000000016322 0ustar00zuulzuul00000000000000================================= pbr - Python Build Reasonableness ================================= A library for managing *setuptools* packaging needs in a consistent manner. 
*pbr* reads and then filters the ``setup.cfg`` data through a setup hook to fill in default values and provide more sensible behaviors, and then feeds the results in as the arguments to a call to ``setup.py`` - so the heavy lifting of handling Python packaging needs is still being done by *setuptools*. Note that we don't support the ``easy_install`` aspects of *setuptools*: while we depend on ``setup_requires``, for any ``install_requires`` we recommend that they be installed prior to running ``setup.py install`` - either by hand, or by using an install tool such as *pip*. *pbr* can and does do a bunch of things for you: * **Version**: Manage version number based on git revisions and tags * **AUTHORS**: Generate AUTHORS file from git log * **ChangeLog**: Generate ChangeLog from git log * **Manifest**: Generate a sensible manifest from git files and some standard files * **Release Notes**: Generate a release notes file using reno * **Requirements**: Store your dependencies in a pip requirements file * **long_description**: Use your README file as a long_description * **Smart find_packages**: Smartly find packages under your root package * **Sphinx Autodoc**: Generate autodoc stub files for your whole module Contents -------- .. toctree:: :maxdepth: 2 user/index reference/index contributor/index ././@PaxHeader0000000000000000000000000000003200000000000011450 xustar000000000000000026 mtime=1673431461.88996 pbr-5.11.1/doc/source/reference/0000775000175000017500000000000000000000000016410 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1673431436.0 pbr-5.11.1/doc/source/reference/index.rst0000664000175000017500000000013100000000000020244 0ustar00zuulzuul00000000000000=================== pbr API Reference =================== .. toctree:: api/modules ././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1673431461.8939602 pbr-5.11.1/doc/source/user/0000775000175000017500000000000000000000000015430 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1673431436.0 pbr-5.11.1/doc/source/user/compatibility.rst0000664000175000017500000000236600000000000021042 0ustar00zuulzuul00000000000000.. The name of this document and the anchor in this document must be treated as a stable API. Links to this document are coded into pbr and deployed versions of pbr will refer users to this document in the case of certain errors. Ensure any link you use in PBR is defined via a ref with .. _name. =================== Compatibility Notes =================== Useful notes about errors users may encounter when features cannot be supported on older versions of setuptools / pip / wheel. setuptools ========== .. _evaluate-marker: evaluate_marker --------------- evaluate_markers may run into issues with the '>', '>=', '<', and '<=' operators if the installed version of setuptools is less than 17.1. Projects using these operators with markers should specify a minimum version of 17.1 for setuptools. pip === markers ------- For versions of pip < 7 with pbr < 1.9, dependencies that use markers will not be installed. Projects using pbr and markers should set a minimum version of 1.9 for pbr. Recommended setup.py ==================== :ref:`setup_py`. Sphinx ====== .. _sphinx-1.5: Version 1.5.0+ -------------- The ``warning-is-error`` flag is only supported by Sphinx 1.5 and will cause errors when used with older versions. 
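Putting the *setuptools* and *pip* notes together, a project that relies on environment
markers with comparison operators can record the relevant floors explicitly. The snippet
below is only a sketch -- the exact pins to use depend on which of the behaviours above
the project actually needs::

    # setup.py -- illustrative minimums drawn from the notes above
    import setuptools

    setuptools.setup(
        setup_requires=['pbr>=1.9'],  # markers honoured even with older pip
        pbr=True,
    )

A corresponding ``setuptools>=17.1`` minimum (needed for the '>', '>=', '<' and '<='
marker operators) can be stated in the project's requirements or installation
documentation.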
././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1673431436.0 pbr-5.11.1/doc/source/user/features.rst0000664000175000017500000002764200000000000020013 0ustar00zuulzuul00000000000000========== Features ========== To understand what *pbr* can do for you, it's probably best to look at two projects: one using pure *setuptools*, and another using *pbr*. First, let's look at the *setuptools* project. .. code-block:: none $ tree -L 1 . ├── AUTHORS ├── CHANGES ├── LICENSE ├── MANIFEST.in ├── README.rst ├── requirements.txt ├── setup.cfg ├── setup.py └── somepackage $ cat setup.py setuptools.setup( name='mypackage', version='1.0.0', description='A short description', long_description="""A much longer description...""", author="John Doe", author_email='john.doe@example.com', license='BSD', ) Here's a similar package using *pbr*: .. code-block:: none $ tree -L 1 . ├── LICENSE ├── README.rst ├── setup.cfg ├── setup.py └── somepackage $ cat setup.py setuptools.setup( pbr=True ) $ cat setup.cfg [metadata] name = mypackage description = A short description description-file = README.rst author = John Doe author-email = john.doe@example.com license = BSD From this, we note a couple of the main features of *pbr*: - Extensive use of ``setup.cfg`` for configuration - Automatic package metadata generation (``version``) - Automatic metadata file generation (``AUTHOR``, ``ChangeLog``, ``MANIFEST.in``, ``RELEASENOTES.txt``) In addition, there are other things that you don't see here but which *pbr* will do for you: - Helpful extensions to *setuptools* commands setup.cfg --------- .. admonition:: Summary *pbr* uses ``setup.cfg`` for all configuration, though ``setup.py`` is still required. One of the main features of *distutils2* was the use of a ``setup.cfg`` INI-style configuration file. This was used to define a package's metadata and other options that were normally supplied to the ``setup()`` function. Recent versions of `setuptools`__ have implemented some of this support, but *pbr* still allows for the definition of the following sections in ``setup.cfg``: - ``files`` - ``entry_points`` - ``backwards_compat`` For more information on these sections, refer to :doc:`/user/using`. __ https://setuptools.readthedocs.io/en/latest/setuptools.html#configuring-setup-using-setup-cfg-files Package Metadata ---------------- .. admonition:: Summary *pbr* removes the need to define a lot of configuration in either ``setup.py`` or ``setup.cfg`` by extracting this information from Git. Version ~~~~~~~ .. admonition:: Summary *pbr* will automatically configure your version for you by parsing semantically-versioned Git tags. Versions can be managed two ways - *post-versioning* and *pre-versioning*. *Post-versioning* is the default while *pre-versioning* is enabled by setting ``version`` in the ``setup.cfg`` ``metadata`` section. In both cases the actual version strings are inferred from Git. If the currently checked out revision is tagged, that tag is used as the version. If the currently checked out revision is not tagged, then we take the last tagged version number and increment it to get a minimum target version. .. note:: *pbr* supports both bare version tag (e.g. ``0.1.0``) and version prefixed with ``v`` or ``V`` (e.g. ``v0.1.0``) We then walk Git history back to the last release. Within each commit we look for a ``Sem-Ver:`` pseudo header and, if found, parse it looking for keywords. 
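For instance, a commit whose change should bump the minor version might carry a footer
like the one below; the subject and body here are invented and only the trailing pseudo
header is significant to *pbr*::

    Add a frobnicate option to the client

    The new option only extends the public interface, so existing
    callers keep working unchanged.

    Sem-Ver: feature
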
Unknown symbols are not an error (so that folk can't wedge *pbr* or break their tree), but we will emit an info-level warning message. The following symbols are recognized: - ``feature`` - ``api-break`` - ``deprecation`` - ``bugfix`` A missing ``Sem-Ver`` line is equivalent to ``Sem-Ver: bugfix``. The ``bugfix`` symbol causes a patch level increment to the version. The ``feature`` and ``deprecation`` symbols cause a minor version increment. The ``api-break`` symbol causes a major version increment. If *post-versioning* is in use, we use the resulting version number as the target version. If *pre-versioning* is in use, we check that the version set in the metadata section of ``setup.cfg`` is greater than the version we infer using the above method. If the inferred version is greater than the *pre-versioning* value we raise an error, otherwise we use the version from ``setup.cfg`` as the target. We then generate dev version strings based on the commits since the last release and include the current Git SHA to disambiguate multiple dev versions with the same number of commits since the release. .. note:: *pbr* expects Git tags to be signed for use in calculating versions. The versions are expected to be compliant with :doc:`semver`. The ``version.SemanticVersion`` class can be used to query versions of a package and present it in various forms - ``debian_version()``, ``release_string()``, ``rpm_string()``, ``version_string()``, or ``version_tuple()``. Long Description ~~~~~~~~~~~~~~~~ .. admonition:: Summary *pbr* can extract the contents of a ``README`` and use this as your long description There is no need to maintain two long descriptions and your ``README`` file is probably a good long_description. So we'll just inject the contents of your ``README.rst``, ``README.txt`` or ``README`` file into your empty ``long_description``. You can also specify the exact file you want to use using the ``description-file`` parameter. You can set the ``description-content-type`` to a MIME type that may help rendering of the description; for example ``text/markdown`` or ``text/x-rst; charset=UTF-8``. Requirements ~~~~~~~~~~~~ .. admonition:: Summary *pbr* will extract requirements from ``requirements.txt`` files and automatically populate the ``install_requires``, ``tests_require`` and ``dependency_links`` arguments to ``setup`` with them. You may not have noticed, but there are differences in how pip ``requirements.txt`` files work and how *setuptools* wants to be told about requirements. The *pip* way is nicer because it sure does make it easier to populate a *virtualenv* for testing or to just install everything you need. Duplicating the information, though, is super lame. To solve this issue, *pbr* will let you use ``requirements.txt``-format files to describe the requirements for your project and will then parse these files, split them up appropriately, and inject them into the ``install_requires``, ``tests_require`` and/or ``dependency_links`` arguments to ``setup``. Voila! Finally, it is possible to specify groups of optional dependencies, or :ref:`"extra" requirements `, in your ``setup.cfg`` rather than ``setup.py``. .. versionchanged:: 5.0 Previously you could specify requirements for a given major version of Python using requirments files with a ``-pyN`` suffix. This was deprecated in 4.0 and removed in 5.0 in favour of environment markers. Automatic File Generation ------------------------- .. 
admonition:: Summary *pbr* can automatically generate a couple of files, which would normally have to be maintained manually, by using Git data. AUTHORS, ChangeLog ~~~~~~~~~~~~~~~~~~ .. admonition:: Summary *pbr* will automatically generate an ``AUTHORS`` and a ``ChangeLog`` file using Git logs. Why keep an ``AUTHORS`` or a ``ChangeLog`` file when Git already has all of the information you need? ``AUTHORS`` generation supports filtering/combining based on a standard ``.mailmap`` file. Manifest ~~~~~~~~ .. admonition:: Summary *pbr* will automatically generate a ``MANIFEST.in`` file based on the files Git is tracking. Just like ``AUTHORS`` and ``ChangeLog``, why keep a list of files you wish to include when you can find many of these in Git. ``MANIFEST.in`` generation ensures almost all files stored in Git, with the exception of ``.gitignore``, ``.gitreview`` and ``.pyc`` files, are automatically included in your distribution. In addition, the generated ``AUTHORS`` and ``ChangeLog`` files are also included. In many cases, this removes the need for an explicit ``MANIFEST.in`` file, though one can be provided to exclude files that are tracked via Git but which should not be included in the final release, such as test files. .. note:: ``MANIFEST.in`` files have no effect on binary distributions such as wheels. Refer to the `Python packaging tutorial`__ for more information. __ https://packaging.python.org/tutorials/distributing-packages/#manifest-in Release Notes ~~~~~~~~~~~~~ .. admonition:: Summary *pbr* will automatically use *reno* \'s ``build_reno`` setuptools command to generate a release notes file, if reno is available and configured. If using *reno*, you may wish to include a copy of the release notes in your packages. *reno* provides a ``build_reno`` `setuptools command`__ and, if reno is present and configured, *pbr* will automatically call this to generate a release notes file for inclusion in your package. __ https://docs.openstack.org/reno/latest/user/setuptools.html Setup Commands -------------- .. _build_sphinx: ``build_sphinx`` ~~~~~~~~~~~~~~~~ .. admonition:: Summary *pbr* will override the Sphinx ``build_sphinx`` command to use *pbr*-provided package metadata and automatically generate API documentation. .. deprecated:: 4.2 This feature has been superseded by the `sphinxcontrib-apidoc`__ (for generation of API documentation) and :ref:`pbr.sphinxext` (for configuration of versioning via package metadata) extensions. It will be removed in a future release. __ https://pypi.org/project/sphinxcontrib-apidoc/ Sphinx can produce auto documentation indexes based on signatures and docstrings of your project but you have to give it index files to tell it to *autodoc* each module: that's kind of repetitive and boring. *pbr* will scan your project, find all of your modules, and generate all of the stub files for you. In addition, Sphinx documentation setups are altered to have several pieces of information that are known to ``setup.py`` injected into the Sphinx config. See the :ref:`pbr-setup-cfg` section of the configuration file for details on configuring your project for *autodoc*. ``test`` ~~~~~~~~ .. admonition:: Summary *pbr* will automatically alias the ``test`` command to use the testing tool of your choice. .. deprecated:: 4.0 *pbr* overrides the *setuptools* ``test`` command if using `testrepository`__ or `nose`__ (deprecated). - *pbr* will check for a ``.testr.conf`` file. 
If this exists and *testrepository* is installed, the ``test`` command will alias the *testr* test runner. If this is not the case... .. note:: This is separate to ``setup.py testr`` (note the extra ``r``) which is provided directly by the ``testrepository`` package. Be careful as there is some overlap of command arguments. - *pbr* will check if ``[nosetests]`` is defined in ``setup.cfg``. If this exists and *nose* is installed, the ``test`` command will alias the *nose* runner. If this is not the case... - In other cases no override will be installed and the ``test`` command will revert to the `setuptools default`__. __ https://testrepository.readthedocs.io/en/latest/ __ https://nose.readthedocs.io/en/latest/ __ https://setuptools.readthedocs.io/en/latest/setuptools.html#test-build-package-and-run-a-unittest-suite .. _pbr.sphinxext: Sphinx Extension ---------------- .. admonition:: Summary *pbr* provides a Sphinx extension to allow you to use *pbr* version metadata in your Sphinx documentation. .. versionadded:: 4.2 *pbr* provides a Sphinx extension which can be used to configure version numbers for documentation. The package does not need to be installed for this to function. .. note:: The ``openstackdocstheme`` Sphinx theme provides similar functionality. This should be preferred for official OpenStack projects. Refer to the `documentation`__ for more information. __ https://docs.openstack.org/openstackdocstheme/ For more information on the extension, refer to :doc:`/user/using`. ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1673431436.0 pbr-5.11.1/doc/source/user/history.rst0000664000175000017500000000004000000000000017655 0ustar00zuulzuul00000000000000.. include:: ../../../ChangeLog ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1673431436.0 pbr-5.11.1/doc/source/user/index.rst0000664000175000017500000000021200000000000017264 0ustar00zuulzuul00000000000000=========== Using pbr =========== .. toctree:: features using packagers semver compatibility releasenotes history ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1673431436.0 pbr-5.11.1/doc/source/user/packagers.rst0000664000175000017500000001033700000000000020126 0ustar00zuulzuul00000000000000=============================== Notes for Package maintainers =============================== If you are maintaining packages of software that uses *pbr*, there are some features you probably want to be aware of that can make your life easier. They are exposed by environment variables, so adding them to rules or spec files should be fairly easy. Versioning ---------- *pbr*, when run in a git repo, derives the version of a package from the git tags. When run in a tarball with a proper egg-info dir, it will happily pull the version from that. So for the most part, the package maintainers shouldn't need to care. However, if you are doing something like keeping a git repo with the sources and the packaging intermixed and it's causing pbr to get confused about whether its in its own git repo or not, you can set ``PBR_VERSION``: :: export PBR_VERSION=1.2.3 and all version calculation logic will be completely skipped and the supplied version will be considered absolute. Distribution version numbers ---------------------------- *pbr* will automatically calculate upstream version numbers for *dpkg* and *rpm* using systems. Releases are easy (and obvious). 
When packaging pre-releases though things get more complex. Firstly, semver does not provide for any sort order between pre-releases and development snapshots, so it can be complex (perhaps intractable) to package both into one repository - we recommend with either packaging pre-release releases (alpha/beta/rc's) or dev snapshots but not both. Secondly, as pre-releases and snapshots have the same major/minor/patch version as the version they lead up to, but have to sort before it, we cannot map their version naturally into the rpm version namespace: instead we represent their versions as versions of the release before. Dependencies ------------ As of 1.0.0 *pbr* doesn't alter the dependency behaviour of *setuptools*. Older versions would invoke *pip* internally under some circumstances and required the environment variable ``SKIP_PIP_INSTALL`` to be set to prevent that. Since 1.0.0 we now document that dependencies should be installed before installing a *pbr* using package. We don't support easy install, but neither do we interfere with it today. If you observe easy install being triggered when building a binary package, then you've probably missed one or more package requirements. .. important:: We reserve the right to disable easy install via *pbr* in future, since we don't want to debug or support the interactions that can occur when using it. .. _packaging-tarballs: Tarballs -------- *pbr* includes everything in a source tarball that is in the original *git* repository. This can again cause havoc if a package maintainer is doing fancy things with combined *git* repos, and is generating a source tarball using ``python setup.py sdist`` from that repo. If that is the workflow the packager is using, setting ``SKIP_GIT_SDIST``: :: export SKIP_GIT_SDIST=1 will cause all logic around using git to find the files that should be in the source tarball to be skipped. Beware though, that because *pbr* packages automatically find all of the files, most of them do not have a complete ``MANIFEST.in`` file, so its possible that a tarball produced in that way will be missing files. .. _packaging-authors-changelog: AUTHORS and ChangeLog --------------------- *pbr* generates ``AUTHORS`` and ``ChangeLog`` files from *git* information. This can cause problem in distro packaging if package maintainer is using *git* repository for packaging source. If that is the case setting ``SKIP_GENERATE_AUTHORS`` :: export SKIP_GENERATE_AUTHORS=1 will cause logic around generating ``AUTHORS`` using *git* information to be skipped. Similarly setting ``SKIP_WRITE_GIT_CHANGELOG`` :: export SKIP_WRITE_GIT_CHANGELOG=1 will cause logic around generating ``ChangeLog`` file using *git* information to be skipped. .. _packaging-releasenotes: Release Notes ------------- *pbr* generates a release notes file, typically called ``RELEASENOTES.rst``, if `reno`_ is present and configured. You may wish to disable this functionality. If that is the case setting ``SKIP_GENERATE_RENO`` :: export SKIP_GENERATE_RENO will disable this feature. .. _reno: https://docs.openstack.org/reno/latest/ ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1673431436.0 pbr-5.11.1/doc/source/user/releasenotes.rst0000664000175000017500000000015000000000000020647 0ustar00zuulzuul00000000000000=============== Release Notes =============== .. 
include:: ../../../RELEASENOTES.rst :start-line: 4 ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1673431436.0 pbr-5.11.1/doc/source/user/semver.rst0000664000175000017500000004131100000000000017463 0ustar00zuulzuul00000000000000Linux/Python Compatible Semantic Versioning 3.0.0 ================================================= This is a fork of Semantic Versioning 2.0. The specific changes have to do with the format of pre-release and build labels, specifically to make them not confusing when co-existing with Linux distribution packaging and Python packaging. Inspiration for the format of the pre-release and build labels came from Python's PEP440. Changes vs SemVer 2.0 --------------------- #. dev versions are defined. These are extremely useful when dealing with CI and CD systems when 'every commit is a release' is not feasible. #. All versions have been made PEP-440 compatible, because of our deep roots in Python: - Pre-release versions are now separated by . not -, and use a/b/c rather than alpha/beta etc. - Alpha version are prefixed by a 0 such as: ``2.0.0.0a1`` instead of ``2.0.0.a1`` for version '2.0.0 alpha 1'. Please note dev version tag does not have a leading 0, as is ``2.0.0.0a2.dev1``. Summary ------- Given a version number MAJOR.MINOR.PATCH, increment the: #. MAJOR version when you make incompatible API changes, #. MINOR version when you add functionality in a backwards-compatible manner, and #. PATCH version when you make backwards-compatible bug fixes. Introduction ------------ In the world of software management there exists a dread place called "dependency hell." The bigger your system grows and the more packages you integrate into your software, the more likely you are to find yourself, one day, in this pit of despair. In systems with many dependencies, releasing new package versions can quickly become a nightmare. If the dependency specifications are too tight, you are in danger of version lock (the inability to upgrade a package without having to release new versions of every dependent package). If dependencies are specified too loosely, you will inevitably be bitten by version promiscuity (assuming compatibility with more future versions than is reasonable). Dependency hell is where you are when version lock and/or version promiscuity prevent you from easily and safely moving your project forward. As a solution to this problem, I propose a simple set of rules and requirements that dictate how version numbers are assigned and incremented. These rules are based on but not necessarily limited to pre-existing widespread common practices in use in both closed and open-source software. For this system to work, you first need to declare a public API. This may consist of documentation or be enforced by the code itself. Regardless, it is important that this API be clear and precise. Once you identify your public API, you communicate changes to it with specific increments to your version number. Consider a version format of X.Y.Z (Major.Minor.Patch). Bug fixes not affecting the API increment the patch version, backwards compatible API additions/changes increment the minor version, and backwards incompatible API changes increment the major version. I call this system "Semantic Versioning." Under this scheme, version numbers and the way they change convey meaning about the underlying code and what has been modified from one version to the next. 
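*pbr*'s own implementation of these rules lives in ``pbr.version.SemanticVersion``. The
fragment below sketches how the ordering described in this document can be exercised from
Python; it assumes the ``from_pip_string`` constructor and the rich comparison support
that current *pbr* releases provide, so treat the exact API as illustrative::

    # Illustrative only: parse three version strings and check that
    # dev release < pre-release < final release, as specified below.
    from pbr.version import SemanticVersion

    dev = SemanticVersion.from_pip_string('1.0.0.dev9')
    alpha = SemanticVersion.from_pip_string('1.0.0.0a1')
    final = SemanticVersion.from_pip_string('1.0.0')

    assert dev < alpha < final
    assert final.version_tuple() == (1, 0, 0)
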
Semantic Versioning Specification (SemVer) ------------------------------------------ The key words "MUST", "MUST NOT", "REQUIRED", "SHALL", "SHALL NOT", "SHOULD", "SHOULD NOT", "RECOMMENDED", "MAY", and "OPTIONAL" in this document are to be interpreted as described in `RFC 2119 `__. #. Software using Semantic Versioning MUST declare a public API. This API could be declared in the code itself or exist strictly in documentation. However it is done, it should be precise and comprehensive. #. A normal version number MUST take the form X.Y.Z where X, Y, and Z are non-negative integers, and MUST NOT contain leading zeroes. X is the major version, Y is the minor version, and Z is the patch version. Each element MUST increase numerically. For instance: 1.9.0 -> 1.10.0 -> 1.11.0. #. Once a versioned package has been released, the contents of that version MUST NOT be modified. Any modifications MUST be released as a new version. #. Major version zero (0.y.z) is for initial development. Anything may change at any time. The public API should not be considered stable. #. Version 1.0.0 defines the public API. The way in which the version number is incremented after this release is dependent on this public API and how it changes. #. Patch version Z (x.y.Z \| x > 0) MUST be incremented if only backwards compatible bug fixes are introduced. A bug fix is defined as an internal change that fixes incorrect behavior. #. Minor version Y (x.Y.z \| x > 0) MUST be incremented if new, backwards compatible functionality is introduced to the public API. It MUST be incremented if any public API functionality is marked as deprecated. It MAY be incremented if substantial new functionality or improvements are introduced within the private code. It MAY include patch level changes. Patch version MUST be reset to 0 when minor version is incremented. #. Major version X (X.y.z \| X > 0) MUST be incremented if any backwards incompatible changes are introduced to the public API. It MAY also include minor and patch level changes. Patch and minor version MUST be reset to 0 when major version is incremented. #. A pre-release version MAY be denoted by appending a dot separated identifier immediately following the patch version. The identifier MUST comprise only a, b, c followed by non-negative integer value. The identifier MUST NOT be empty. Pre-release versions have a lower precedence than the associated normal version. A pre-release version indicates that the version is unstable and might not satisfy the intended compatibility requirements as denoted by its associated normal version. Examples: 1.0.0.0a1, 1.0.0.0b99, 1.0.0.0c1000. #. A development version MAY be denoted by appending a dot separated identifier immediately following the patch version. The identifier MUST comprise the string dev followed by non-negative integer value. The identifier MUST NOT be empty. Development versions have a lower precedence than the associated normal version or pre-release version. A development version is a completely unsupported and conveys no API promises when related to other versions. They are more useful as communication vehicles between developers of a community, whereas pre-releases, while potentially prone to break still, are intended for externally facing communication of not-yet-released ideas. Dev versions are not public artifacts and should never be placed in public repositories: they are intended as developer-local resources. Examples: 1.0.0.dev1, 1.0.0.0a1.dev1 #. 
git version metadata MAY be denoted by appending a dot separated identifier immediately following a development or pre-release version. The identifier MUST comprise the character g followed by a seven character git short-sha. The sha MUST NOT be empty. git version metadata MUST be ignored when determining version precedence. Thus two versions that differ only in the git version, have the same precedence. Example: 1.0.0.0a1.g95a9beb. #. Build metadata MAY be denoted by appending a plus sign and a series of dot separated identifiers immediately following the patch or pre-release version. Identifiers MUST comprise only ASCII alphanumerics [0-9A-Za-z]. Identifiers MUST NOT be empty. Build metadata MUST be ignored when determining version precedence. Thus two versions that differ only in the build metadata, have the same precedence. Examples: 1.0.0.0a1+001, 1.0.0+20130313144700, 1.0.0.0b1+exp.sha.5114f85. #. Precedence refers to how versions are compared to each other when ordered. Precedence MUST be calculated by separating the version into major, minor, patch, pre-release, and development identifiers in that order (Build metadata does not figure into precedence). Precedence is determined by the first difference when comparing each of these identifiers from left to right as follows: Major, minor, and patch versions are always compared numerically. Example: 1.0.0 < 2.0.0 < 2.1.0 < 2.1.1. When major, minor, and patch are equal, a pre-release version has lower precedence than a normal version. Example: 1.0.0.0a1 < 1.0.0. When major, minor, patch and pre-release are equal, a development version has a lower precedence than a normal version and of a pre-release version. Example: 1.0.0.dev1 < 1.0.0 and 1.0.0.dev9 < 1.0.0.0a1 and 1.0.0.0a1 < 1.0.0.0a2.dev4. Precedence for two pre-release versions with the same major, minor, and patch version MUST be determined by comparing the identifier to the right of the patch version as follows: if the alpha portion matches, the numeric portion is compared in numerical sort order. If the alpha portion does not match, the sort order is dev < a < b < c. Example: 1.0.0.dev8 < 1.0.0.dev9 < 1.0.0.0a1.dev3 < 1.0.0.0a1 < 1.0.0.0b2 < 1.0.0.0c1 < 1.0.0. Precedence for dev versions if all other components are equal is done by comparing their numeric component. If all other components are not equal, predence is determined by comparing the other components. Why Use Semantic Versioning? ---------------------------- This is not a new or revolutionary idea. In fact, you probably do something close to this already. The problem is that "close" isn't good enough. Without compliance to some sort of formal specification, version numbers are essentially useless for dependency management. By giving a name and clear definition to the above ideas, it becomes easy to communicate your intentions to the users of your software. Once these intentions are clear, flexible (but not too flexible) dependency specifications can finally be made. A simple example will demonstrate how Semantic Versioning can make dependency hell a thing of the past. Consider a library called "Firetruck." It requires a Semantically Versioned package named "Ladder." At the time that Firetruck is created, Ladder is at version 3.1.0. Since Firetruck uses some functionality that was first introduced in 3.1.0, you can safely specify the Ladder dependency as greater than or equal to 3.1.0 but less than 4.0.0. 
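Written as a pip requirement specifier, that dependency declaration would look something
like this (the package name is, of course, part of the example)::

    Ladder>=3.1.0,<4.0.0
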
Now, when Ladder version 3.1.1 and 3.2.0 become available, you can release them to your package management system and know that they will be compatible with existing dependent software. As a responsible developer you will, of course, want to verify that any package upgrades function as advertised. The real world is a messy place; there's nothing we can do about that but be vigilant. What you can do is let Semantic Versioning provide you with a sane way to release and upgrade packages without having to roll new versions of dependent packages, saving you time and hassle. If all of this sounds desirable, all you need to do to start using Semantic Versioning is to declare that you are doing so and then follow the rules. Link to this website from your README so others know the rules and can benefit from them. FAQ --- How should I deal with revisions in the 0.y.z initial development phase? ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ The simplest thing to do is start your initial development release at 0.1.0 and then increment the minor version for each subsequent release. How do I know when to release 1.0.0? ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ If your software is being used in production, it should probably already be 1.0.0. If you have a stable API on which users have come to depend, you should be 1.0.0. If you're worrying a lot about backwards compatibility, you should probably already be 1.0.0. Doesn't this discourage rapid development and fast iteration? ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Major version zero is all about rapid development. If you're changing the API every day you should either still be in version 0.y.z or on a separate development branch working on the next major version. If even the tiniest backwards incompatible changes to the public API require a major version bump, won't I end up at version 42.0.0 very rapidly? ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ This is a question of responsible development and foresight. Incompatible changes should not be introduced lightly to software that has a lot of dependent code. The cost that must be incurred to upgrade can be significant. Having to bump major versions to release incompatible changes means you'll think through the impact of your changes, and evaluate the cost/benefit ratio involved. Documenting the entire public API is too much work! ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ It is your responsibility as a professional developer to properly document software that is intended for use by others. Managing software complexity is a hugely important part of keeping a project efficient, and that's hard to do if nobody knows how to use your software, or what methods are safe to call. In the long run, Semantic Versioning, and the insistence on a well defined public API can keep everyone and everything running smoothly. What do I do if I accidentally release a backwards incompatible change as a minor version? ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ As soon as you realize that you've broken the Semantic Versioning spec, fix the problem and release a new minor version that corrects the problem and restores backwards compatibility. Even under this circumstance, it is unacceptable to modify versioned releases. 
If it's appropriate, document the offending version and inform your users of the problem so that they are aware of the offending version. What should I do if I update my own dependencies without changing the public API? ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ That would be considered compatible since it does not affect the public API. Software that explicitly depends on the same dependencies as your package should have their own dependency specifications and the author will notice any conflicts. Determining whether the change is a patch level or minor level modification depends on whether you updated your dependencies in order to fix a bug or introduce new functionality. I would usually expect additional code for the latter instance, in which case it's obviously a minor level increment. What if I inadvertently alter the public API in a way that is not compliant with the version number change (i.e. the code incorrectly introduces a major breaking change in a patch release) ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Use your best judgment. If you have a huge audience that will be drastically impacted by changing the behavior back to what the public API intended, then it may be best to perform a major version release, even though the fix could strictly be considered a patch release. Remember, Semantic Versioning is all about conveying meaning by how the version number changes. If these changes are important to your users, use the version number to inform them. How should I handle deprecating functionality? ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Deprecating existing functionality is a normal part of software development and is often required to make forward progress. When you deprecate part of your public API, you should do two things: (1) update your documentation to let users know about the change, (2) issue a new minor release with the deprecation in place. Before you completely remove the functionality in a new major release there should be at least one minor release that contains the deprecation so that users can smoothly transition to the new API. Does SemVer have a size limit on the version string? ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ No, but use good judgment. A 255 character version string is probably overkill, for example. Also, specific systems may impose their own limits on the size of the string. About ----- The Linux/Python Compatible Semantic Versioning specification is maintained by the `OpenStack `_ project. It is based on The Semantic Versioning specification, which was authored by `Tom Preston-Werner `__, with inputs from `PEP 440 `_ If you'd like to leave feedback, please `open an issue `_. License ------- Creative Commons - CC BY 3.0 http://creativecommons.org/licenses/by/3.0/ ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1673431436.0 pbr-5.11.1/doc/source/user/using.rst0000664000175000017500000004202700000000000017314 0ustar00zuulzuul00000000000000======= Usage ======= *pbr* is a *setuptools* plugin and so to use it you must use *setuptools* and call ``setuptools.setup()``. While the normal *setuptools* facilities are available, *pbr* makes it possible to express them through static data files. .. 
_setup_py: ``setup.py`` ------------ *pbr* only requires a minimal ``setup.py`` file compared to a standard *setuptools* project. This is because most configuration is located in static configuration files. This recommended minimal ``setup.py`` file should look something like this:: #!/usr/bin/env python from setuptools import setup setup( setup_requires=['pbr'], pbr=True, ) .. note:: It is necessary to specify ``pbr=True`` to enabled *pbr* functionality. .. note:: While one can pass any arguments supported by setuptools to ``setup()``, any conflicting arguments supplied in ``setup.cfg`` will take precedence. ``pyproject.toml`` ------------------ PBR can be configured as a PEP517 build-system in ``pyproject.toml``. This currently continues to rely on setuptools which means you need the above ``setup.py`` file to be present. The main benefits to using a ``pyproject.toml`` file with PBR are that you can control the versions of PBR and setuptools that are used avoiding easy_install invocation. Your build-system block in ``pyproject.toml`` will need to look something like this:: [build-system] requires = ["pbr>=5.7.0", "setuptools>=36.6.0"] build-backend = "pbr.build" Eventually PBR may grow its own direct support for PEP517 build hooks, but until then it will continue to need setuptools and ``setup.py``. .. _setup_cfg: ``setup.cfg`` ------------- The ``setup.cfg`` file is an INI-like file that can mostly replace the ``setup.py`` file. It is similar to the ``setup.cfg`` file found in recent versions of `setuptools`__. A simple sample can be found in *pbr*'s own ``setup.cfg`` (it uses its own machinery to install itself): :: [metadata] name = pbr author = OpenStack Foundation author-email = openstack-discuss@lists.openstack.org summary = OpenStack's setup automation in a reusable form description-file = README.rst description-content-type = text/x-rst; charset=UTF-8 home-page = https://launchpad.net/pbr project_urls = Bug Tracker = https://bugs.launchpad.net/pbr/ Documentation = https://docs.openstack.org/pbr/ Source Code = https://opendev.org/openstack/pbr license = Apache-2 classifier = Development Status :: 4 - Beta Environment :: Console Environment :: OpenStack Intended Audience :: Developers Intended Audience :: Information Technology License :: OSI Approved :: Apache Software License Operating System :: OS Independent Programming Language :: Python keywords = setup distutils [files] packages = pbr data_files = etc/pbr = etc/* etc/init = pbr.packaging.conf pbr.version.conf [entry_points] console_scripts = pbr = pbr.cmd:main pbr.config.drivers = plain = pbr.cfg.driver:Plain Recent versions of `setuptools`_ provide many of the same sections as *pbr*. However, *pbr* does provide a number of additional sections: - ``files`` - ``entry_points`` - ``backwards_compat`` - ``pbr`` In addition, there are some modifications to other sections: - ``metadata`` - ``build_sphinx`` For all other sections, you should refer to either the `setuptools`_ documentation or the documentation of the package that provides the section, such as the ``extract_messages`` section provided by Babel__. .. note:: Comments may be used in ``setup.cfg``, however all comments should start with a ``#`` and may be on a single line, or in line, with at least one white space character immediately preceding the ``#``. Semicolons are not a supported comment delimiter. For instance:: [section] # A comment at the start of a dedicated line key = value1 # An in line comment value2 # A comment on a dedicated line value3 .. 
note:: On Python 3 ``setup.cfg`` is explicitly read as UTF-8. On Python 2 the encoding is dependent on the terminal encoding. __ http://setuptools.readthedocs.io/en/latest/setuptools.html#configuring-setup-using-setup-cfg-files __ http://babel.pocoo.org/en/latest/setup.html ``files`` ~~~~~~~~~ The ``files`` section defines the install location of files in the package using three fundamental keys: ``packages``, ``namespace_packages``, and ``data_files``. ``packages`` A list of top-level packages that should be installed. The behavior of packages is similar to ``setuptools.find_packages`` in that it recurses the Python package hierarchy below the given top level and installs all of it. If ``packages`` is not specified, it defaults to the value of the ``name`` field given in the ``[metadata]`` section. ``namespace_packages`` Similar to ``packages``, but is a list of packages that provide namespace packages. ``data_files`` A list of files to be installed. The format is an indented block that contains key value pairs which specify target directory and source file to install there. More than one source file for a directory may be indicated with a further indented list. Source files are stripped of leading directories. Additionally, *pbr* supports a simple file globbing syntax for installing entire directory structures. For example:: [files] data_files = etc/pbr = etc/pbr/* etc/neutron = etc/api-paste.ini etc/dhcp-agent.ini etc/init.d = neutron.init This will result in ``/etc/neutron`` containing ``api-paste.ini`` and ``dhcp-agent.ini``, both of which *pbr* will expect to find in the ``etc`` directory in the root of the source tree. Additionally, ``neutron.init`` from that directory will be installed in ``/etc/init.d``. All of the files and directories located under ``etc/pbr`` in the source tree will be installed into ``/etc/pbr``. Note that this behavior is relative to the effective root of the environment into which the packages are installed, so depending on available permissions this could be the actual system-wide ``/etc`` directory or just a top-level ``etc`` subdirectory of a *virtualenv*. ``entry_points`` ~~~~~~~~~~~~~~~~ The ``entry_points`` section defines entry points for generated console scripts and Python libraries. This is actually provided by *setuptools* but is documented here owing to its importance. The general syntax of specifying entry points is a top level name indicating the entry point group name, followed by one or more key value pairs naming the entry point to be installed. For instance:: [entry_points] console_scripts = pbr = pbr.cmd:main pbr.config.drivers = plain = pbr.cfg.driver:Plain fancy = pbr.cfg.driver:Fancy Will cause a console script called *pbr* to be installed that executes the ``main`` function found in ``pbr.cmd``. Additionally, two entry points will be installed for ``pbr.config.drivers``, one called ``plain`` which maps to the ``Plain`` class in ``pbr.cfg.driver`` and one called ``fancy`` which maps to the ``Fancy`` class in ``pbr.cfg.driver``. ``backwards_compat`` ~~~~~~~~~~~~~~~~~~~~~ .. todo:: Describe this section .. _pbr-setup-cfg: ``pbr`` ~~~~~~~ The ``pbr`` section controls *pbr*-specific options and behaviours. ``skip_git_sdist`` If enabled, *pbr* will not generate a manifest file from *git* commits. If this is enabled, you may need to define your own `manifest template`__. This can also be configured using the ``SKIP_GIT_SDIST`` environment variable, as described :ref:`here `. 
__ https://packaging.python.org/tutorials/distributing-packages/#manifest-in ``skip_changelog`` If enabled, *pbr* will not generated a ``ChangeLog`` file from *git* commits. This can also be configured using the ``SKIP_WRITE_GIT_CHANGELOG`` environment variable, as described :ref:`here ` ``skip_authors`` If enabled, *pbr* will not generate an ``AUTHORS`` file from *git* commits. This can also be configured using the ``SKIP_GENERATE_AUTHORS`` environment variable, as described :ref:`here ` ``skip_reno`` If enabled, *pbr* will not generate a ``RELEASENOTES.txt`` file if `reno`_ is present and configured. This can also be configured using the ``SKIP_GENERATE_RENO`` environment variable, as described :ref:`here `. ``autodoc_tree_index_modules`` A boolean option controlling whether *pbr* should generate an index of modules using ``sphinx-apidoc``. By default, all files except ``setup.py`` are included, but this can be overridden using the ``autodoc_tree_excludes`` option. .. deprecated:: 4.2 This feature has been replaced by the `sphinxcontrib-apidoc`_ extension. Refer to the :ref:`build_sphinx` overview for more information. ``autodoc_tree_excludes`` A list of modules to exclude when building documentation using ``sphinx-apidoc``. Defaults to ``[setup.py]``. Refer to the `sphinx-apidoc man page`__ for more information. __ http://sphinx-doc.org/man/sphinx-apidoc.html .. deprecated:: 4.2 This feature has been replaced by the `sphinxcontrib-apidoc`_ extension. Refer to the :ref:`build_sphinx` overview for more information. ``autodoc_index_modules`` A boolean option controlling whether *pbr* should itself generates documentation for Python modules of the project. By default, all found Python modules are included; some of them can be excluded by listing them in ``autodoc_exclude_modules``. .. deprecated:: 4.2 This feature has been replaced by the `sphinxcontrib-apidoc`_ extension. Refer to the :ref:`build_sphinx` overview for more information. ``autodoc_exclude_modules`` A list of modules to exclude when building module documentation using *pbr*. *fnmatch* style pattern (e.g. ``myapp.tests.*``) can be used. .. deprecated:: 4.2 This feature has been replaced by the `sphinxcontrib-apidoc`_ extension. Refer to the :ref:`build_sphinx` overview for more information. ``api_doc_dir`` A subdirectory inside the ``build_sphinx.source_dir`` where auto-generated API documentation should be written, if ``autodoc_index_modules`` is set to True. Defaults to ``"api"``. .. deprecated:: 4.2 This feature has been replaced by the `sphinxcontrib-apidoc`_ extension. Refer to the :ref:`build_sphinx` overview for more information. .. note:: When using ``autodoc_tree_excludes`` or ``autodoc_index_modules`` you may also need to set ``exclude_patterns`` in your Sphinx configuration file (generally found at ``doc/source/conf.py`` in most OpenStack projects) otherwise Sphinx may complain about documents that are not in a toctree. This is especially true if the ``[sphinx_build] warning-is-error`` option is set. See the `Sphinx build configuration file`__ documentation for more information on configuring Sphinx. __ http://sphinx-doc.org/config.html .. versionchanged:: 4.2 The ``autodoc_tree_index_modules``, ``autodoc_tree_excludes``, ``autodoc_index_modules``, ``autodoc_exclude_modules`` and ``api_doc_dir`` settings are all deprecated. .. versionchanged:: 2.0 The ``pbr`` section used to take a ``warnerrors`` option that would enable the ``-W`` (Turn warnings into errors.) option when building Sphinx. 
This feature was broken in 1.10 and was removed in pbr 2.0 in favour of the ``[build_sphinx] warning-is-error`` provided in Sphinx 1.5+. ``metadata`` ~~~~~~~~~~~~ .. todo:: Describe this section .. _build_sphinx-setup-cfg: ``build_sphinx`` ~~~~~~~~~~~~~~~~ .. versionchanged:: 3.0 The ``build_sphinx`` plugin used to default to building both HTML and man page output. This is no longer the case, and you should explicitly set ``builders`` to ``html man`` if you wish to retain this behavior. .. deprecated:: 4.2 This feature has been superseded by the `sphinxcontrib-apidoc`_ (for generation of API documentation) and :ref:`pbr.sphinxext` (for configuration of versioning via package metadata) extensions. It will be removed in a future release. The ``build_sphinx`` section is a version of the ``build_sphinx`` *setuptools* plugin provided with Sphinx. This plugin extends the original plugin to add the following: - Automatic generation of module documentation using the ``sphinx-apidoc`` tool - Automatic configuration of the ``project``, ``version`` and ``release`` settings using information from *pbr* itself - Support for multiple builders using the ``builders`` configuration option .. note:: Only applies to Sphinx < 1.6. See documentation on ``builders`` below. The version of ``build_sphinx`` provided by *pbr* provides a single additional option. ``builders`` A comma separated list of builders to run. For example, to build both HTML and man page documentation, you would define the following in your ``setup.cfg``: .. code-block:: ini [build_sphinx] builders = html,man source-dir = doc/source build-dir = doc/build all-files = 1 warning-is-error = 1 .. deprecated:: 3.2.0 Sphinx 1.6+ adds support for specifying multiple builders in the default ``builder`` option. You should use this option instead. Refer to the `Sphinx documentation`_ for more information. For information on the remaining options, refer to the `Sphinx documentation`_. In addition, the ``autodoc_index_modules``, ``autodoc_tree_index_modules``, ``autodoc_exclude_modules`` and ``autodoc_tree_excludes`` options :ref:`in the pbr section ` will affect the output of the automatic module documentation generation. .. _Sphinx documentation: http://www.sphinx-doc.org/en/stable/setuptools.html Requirements ------------ Requirements files are used in place of the ``install_requires`` and ``extras_require`` attributes. Requirement files should be given one of the below names. This order is also the order that the requirements are tried in: * ``requirements.txt`` * ``tools/pip-requires`` Only the first file found is used to install the list of packages it contains. .. versionchanged:: 5.0 Previously you could specify requirements for a given major version of Python using requirements files with a ``-pyN`` suffix. This was deprecated in 4.0 and removed in 5.0 in favour of environment markers. .. _extra-requirements: Extra requirements ~~~~~~~~~~~~~~~~~~ Groups of optional dependencies, or `"extra" requirements`__, can be described in your ``setup.cfg``, rather than needing to be added to ``setup.py``. An example (which also demonstrates the use of environment markers) is shown below. __ https://www.python.org/dev/peps/pep-0426/#extras-optional-dependencies Environment markers ~~~~~~~~~~~~~~~~~~~ Environment markers are `conditional dependencies`__ which can be added to the requirements (or to a group of extra requirements) automatically, depending on the environment the installer is running in. 
They can be added to requirements in the requirements file, or to extras defined in ``setup.cfg``, but the format is slightly different for each. For ``requirements.txt``:: argparse; python_version=='2.6' This will result in the package depending on ``argparse`` only if it's being installed into Python 2.6. For extras specified in ``setup.cfg``, add an ``extras`` section. For instance, to create two groups of extra requirements with additional constraints on the environment, you can use:: [extras] security = aleph bet:python_version=='3.2' gimel:python_version=='2.7' testing = quux:python_version=='2.7' __ https://www.python.org/dev/peps/pep-0426/#environment-markers Testing ------- .. deprecated:: 4.0 As described in :doc:`/user/features`, *pbr* may override the ``test`` command depending on the test runner used. A typical usage would be in ``tox.ini`` such as:: [tox] minversion = 2.0 skipsdist = True envlist = py33,py34,py35,py26,py27,pypy,pep8,docs [testenv] usedevelop = True setenv = VIRTUAL_ENV={envdir} CLIENT_NAME=pbr deps = . -r{toxinidir}/test-requirements.txt commands = python setup.py test --testr-args='{posargs}' The argument ``--coverage`` will set ``PYTHON`` to ``coverage run`` to produce a coverage report. ``--coverage-package-name`` can be used to modify or narrow the packages traced. Sphinx ``conf.py`` ------------------ As described in :doc:`/user/features`, *pbr* provides a Sphinx extension to automatically configure the version numbers for your documentation using *pbr* metadata. To enable this extension, you must add it to the list of extensions in your ``conf.py`` file:: extensions = [ 'pbr.sphinxext', # ... other extensions ] You should also unset/remove the ``version`` and ``release`` attributes from this file. .. _setuptools: http://www.sphinx-doc.org/en/stable/setuptools.html .. _sphinxcontrib-apidoc: https://pypi.org/project/sphinxcontrib-apidoc/ .. _reno: https://docs.openstack.org/reno/latest/ ././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1673431461.8939602 pbr-5.11.1/pbr/0000775000175000017500000000000000000000000013170 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1673431436.0 pbr-5.11.1/pbr/__init__.py0000664000175000017500000000000000000000000015267 0ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1673431436.0 pbr-5.11.1/pbr/build.py0000664000175000017500000000326700000000000014651 0ustar00zuulzuul00000000000000# Copyright 2021 Monty Taylor # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
"""pep-517 support Add:: [build-system] requires = ["pbr>=5.7.0", "setuptools>=36.6.0", "wheel"] build-backend = "pbr.build" to pyproject.toml to use this """ from setuptools import build_meta __all__ = [ 'get_requires_for_build_sdist', 'get_requires_for_build_wheel', 'prepare_metadata_for_build_wheel', 'build_wheel', 'build_sdist', ] def get_requires_for_build_wheel(config_settings=None): return build_meta.get_requires_for_build_wheel(config_settings) def get_requires_for_build_sdist(config_settings=None): return build_meta.get_requires_for_build_sdist(config_settings) def prepare_metadata_for_build_wheel(metadata_directory, config_settings=None): return build_meta.prepare_metadata_for_build_wheel( metadata_directory, config_settings) def build_wheel( wheel_directory, config_settings=None, metadata_directory=None, ): return build_meta.build_wheel( wheel_directory, config_settings, metadata_directory, ) def build_sdist(sdist_directory, config_settings=None): return build_meta.build_sdist(sdist_directory, config_settings) ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1673431436.0 pbr-5.11.1/pbr/builddoc.py0000664000175000017500000002721300000000000015334 0ustar00zuulzuul00000000000000# Copyright 2011 OpenStack Foundation # Copyright 2012-2013 Hewlett-Packard Development Company, L.P. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from distutils import log import fnmatch import os import sys try: import cStringIO except ImportError: import io as cStringIO try: import sphinx # NOTE(dhellmann): Newer versions of Sphinx have moved the apidoc # module into sphinx.ext and the API is slightly different (the # function expects sys.argv[1:] instead of sys.argv[:]. So, figure # out where we can import it from and set a flag so we can invoke # it properly. See this change in sphinx for details: # https://github.com/sphinx-doc/sphinx/commit/87630c8ae8bff8c0e23187676e6343d8903003a6 try: from sphinx.ext import apidoc apidoc_use_padding = False except ImportError: from sphinx import apidoc apidoc_use_padding = True from sphinx import application from sphinx import setup_command except Exception as e: # NOTE(dhellmann): During the installation of docutils, setuptools # tries to import pbr code to find the egg_info.writer hooks. That # imports this module, which imports sphinx, which imports # docutils, which is being installed. Because docutils uses 2to3 # to convert its code during installation under python 3, the # import fails, but it fails with an error other than ImportError # (today it's a NameError on StandardError, an exception base # class). Convert the exception type here so it can be caught in # packaging.py where we try to determine if we can import and use # sphinx by importing this module. See bug #1403510 for details. 
raise ImportError(str(e)) from pbr import git from pbr import options from pbr import version _deprecated_options = ['autodoc_tree_index_modules', 'autodoc_index_modules', 'autodoc_tree_excludes', 'autodoc_exclude_modules'] _deprecated_envs = ['AUTODOC_TREE_INDEX_MODULES', 'AUTODOC_INDEX_MODULES'] _rst_template = """%(heading)s %(underline)s .. automodule:: %(module)s :members: :undoc-members: :show-inheritance: """ def _find_modules(arg, dirname, files): for filename in files: if filename.endswith('.py') and filename != '__init__.py': arg["%s.%s" % (dirname.replace('/', '.'), filename[:-3])] = True class LocalBuildDoc(setup_command.BuildDoc): builders = ['html'] command_name = 'build_sphinx' sphinx_initialized = False def _get_source_dir(self): option_dict = self.distribution.get_option_dict('build_sphinx') pbr_option_dict = self.distribution.get_option_dict('pbr') _, api_doc_dir = pbr_option_dict.get('api_doc_dir', (None, 'api')) if 'source_dir' in option_dict: source_dir = os.path.join(option_dict['source_dir'][1], api_doc_dir) else: source_dir = 'doc/source/' + api_doc_dir if not os.path.exists(source_dir): os.makedirs(source_dir) return source_dir def generate_autoindex(self, excluded_modules=None): log.info("[pbr] Autodocumenting from %s" % os.path.abspath(os.curdir)) modules = {} source_dir = self._get_source_dir() for pkg in self.distribution.packages: if '.' not in pkg: for dirpath, dirnames, files in os.walk(pkg): _find_modules(modules, dirpath, files) def include(module): return not any(fnmatch.fnmatch(module, pat) for pat in excluded_modules) module_list = sorted(mod for mod in modules.keys() if include(mod)) autoindex_filename = os.path.join(source_dir, 'autoindex.rst') with open(autoindex_filename, 'w') as autoindex: autoindex.write(""".. 
toctree:: :maxdepth: 1 """) for module in module_list: output_filename = os.path.join(source_dir, "%s.rst" % module) heading = "The :mod:`%s` Module" % module underline = "=" * len(heading) values = dict(module=module, heading=heading, underline=underline) log.info("[pbr] Generating %s" % output_filename) with open(output_filename, 'w') as output_file: output_file.write(_rst_template % values) autoindex.write(" %s.rst\n" % module) def _sphinx_tree(self): source_dir = self._get_source_dir() cmd = ['-H', 'Modules', '-o', source_dir, '.'] if apidoc_use_padding: cmd.insert(0, 'apidoc') apidoc.main(cmd + self.autodoc_tree_excludes) def _sphinx_run(self): if not self.verbose: status_stream = cStringIO.StringIO() else: status_stream = sys.stdout confoverrides = {} if self.project: confoverrides['project'] = self.project if self.version: confoverrides['version'] = self.version if self.release: confoverrides['release'] = self.release if self.today: confoverrides['today'] = self.today if self.sphinx_initialized: confoverrides['suppress_warnings'] = [ 'app.add_directive', 'app.add_role', 'app.add_generic_role', 'app.add_node', 'image.nonlocal_uri', ] app = application.Sphinx( self.source_dir, self.config_dir, self.builder_target_dir, self.doctree_dir, self.builder, confoverrides, status_stream, freshenv=self.fresh_env, warningiserror=self.warning_is_error) self.sphinx_initialized = True try: app.build(force_all=self.all_files) except Exception as err: from docutils import utils if isinstance(err, utils.SystemMessage): sys.stderr.write('reST markup error:\n') sys.stderr.write(err.args[0].encode('ascii', 'backslashreplace')) sys.stderr.write('\n') else: raise if self.link_index: src = app.config.master_doc + app.builder.out_suffix dst = app.builder.get_outfilename('index') os.symlink(src, dst) def run(self): option_dict = self.distribution.get_option_dict('pbr') # TODO(stephenfin): Remove this (and the entire file) when 5.0 is # released warn_opts = set(option_dict.keys()).intersection(_deprecated_options) warn_env = list(filter(lambda x: os.getenv(x), _deprecated_envs)) if warn_opts or warn_env: msg = ('The autodoc and autodoc_tree features are deprecated in ' '4.2 and will be removed in a future release. You should ' 'use the sphinxcontrib-apidoc Sphinx extension instead. ' 'Refer to the pbr documentation for more information.') if warn_opts: msg += ' Deprecated options: %s' % list(warn_opts) if warn_env: msg += ' Deprecated environment variables: %s' % warn_env log.warn(msg) if git._git_is_installed(): git.write_git_changelog(option_dict=option_dict) git.generate_authors(option_dict=option_dict) tree_index = options.get_boolean_option(option_dict, 'autodoc_tree_index_modules', 'AUTODOC_TREE_INDEX_MODULES') auto_index = options.get_boolean_option(option_dict, 'autodoc_index_modules', 'AUTODOC_INDEX_MODULES') if not os.getenv('SPHINX_DEBUG'): # NOTE(afazekas): These options can be used together, # but they do a very similar thing in a different way if tree_index: self._sphinx_tree() if auto_index: self.generate_autoindex( set(option_dict.get( "autodoc_exclude_modules", [None, ""])[1].split())) self.finalize_options() is_multibuilder_sphinx = version.SemanticVersion.from_pip_string( sphinx.__version__) >= version.SemanticVersion(1, 6) # TODO(stephenfin): Remove support for Sphinx < 1.6 in 4.0 if not is_multibuilder_sphinx: log.warn('[pbr] Support for Sphinx < 1.6 will be dropped in ' 'pbr 4.0. 
Upgrade to Sphinx 1.6+') # TODO(stephenfin): Remove this at the next MAJOR version bump if self.builders != ['html']: log.warn("[pbr] Sphinx 1.6 added native support for " "specifying multiple builders in the " "'[sphinx_build] builder' configuration option, " "found in 'setup.cfg'. As a result, the " "'[sphinx_build] builders' option has been " "deprecated and will be removed in pbr 4.0. Migrate " "to the 'builder' configuration option.") if is_multibuilder_sphinx: self.builder = self.builders if is_multibuilder_sphinx: # Sphinx >= 1.6 return setup_command.BuildDoc.run(self) # Sphinx < 1.6 for builder in self.builders: self.builder = builder self.finalize_options() self._sphinx_run() def initialize_options(self): # Not a new style class, super keyword does not work. setup_command.BuildDoc.initialize_options(self) # NOTE(dstanek): exclude setup.py from the autodoc tree index # builds because all projects will have an issue with it self.autodoc_tree_excludes = ['setup.py'] def finalize_options(self): from pbr import util # Not a new style class, super keyword does not work. setup_command.BuildDoc.finalize_options(self) # Handle builder option from command line - override cfg option_dict = self.distribution.get_option_dict('build_sphinx') if 'command line' in option_dict.get('builder', [[]])[0]: self.builders = option_dict['builder'][1] # Allow builders to be configurable - as a comma separated list. if not isinstance(self.builders, list) and self.builders: self.builders = self.builders.split(',') self.project = self.distribution.get_name() self.version = self.distribution.get_version() self.release = self.distribution.get_version() # NOTE(dstanek): check for autodoc tree exclusion overrides # in the setup.cfg opt = 'autodoc_tree_excludes' option_dict = self.distribution.get_option_dict('pbr') if opt in option_dict: self.autodoc_tree_excludes = util.split_multiline( option_dict[opt][1]) # handle Sphinx < 1.5.0 if not hasattr(self, 'warning_is_error'): self.warning_is_error = False ././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1673431461.8979604 pbr-5.11.1/pbr/cmd/0000775000175000017500000000000000000000000013733 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1673431436.0 pbr-5.11.1/pbr/cmd/__init__.py0000664000175000017500000000000000000000000016032 0ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1673431436.0 pbr-5.11.1/pbr/cmd/main.py0000664000175000017500000000715700000000000015243 0ustar00zuulzuul00000000000000# Copyright 2014 Hewlett-Packard Development Company, L.P. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
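# Illustrative usage of the command-line interface defined below. The ``pbr``
# console-script name is an assumption (it depends on how the package's entry
# points are configured); the subcommands are the ones registered in main():
#
#   pbr info <package>           # name, version, release state and git sha
#   pbr info --short <package>   # version only
#   pbr sha <package>            # git sha recorded in the pbr.json metadata
#   pbr freeze                   # pip-freeze-style listing annotated with shas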
import argparse import json import sys import pkg_resources import pbr.version def _get_metadata(package_name): try: return json.loads( pkg_resources.get_distribution( package_name).get_metadata('pbr.json')) except pkg_resources.DistributionNotFound: raise Exception('Package {0} not installed'.format(package_name)) except Exception: return None def get_sha(args): sha = _get_info(args.name)['sha'] if sha: print(sha) def get_info(args): if args.short: print("{version}".format(**_get_info(args.name))) else: print("{name}\t{version}\t{released}\t{sha}".format( **_get_info(args.name))) def _get_info(name): metadata = _get_metadata(name) version = pkg_resources.get_distribution(name).version if metadata: if metadata['is_release']: released = 'released' else: released = 'pre-release' sha = metadata['git_version'] else: version_parts = version.split('.') if version_parts[-1].startswith('g'): sha = version_parts[-1][1:] released = 'pre-release' else: sha = "" released = "released" for part in version_parts: if not part.isdigit(): released = "pre-release" return dict(name=name, version=version, sha=sha, released=released) def freeze(args): sorted_dists = sorted(pkg_resources.working_set, key=lambda dist: dist.project_name.lower()) for dist in sorted_dists: info = _get_info(dist.project_name) output = "{name}=={version}".format(**info) if info['sha']: output += " # git sha {sha}".format(**info) print(output) def main(): parser = argparse.ArgumentParser( description='pbr: Python Build Reasonableness') parser.add_argument( '-v', '--version', action='version', version=str(pbr.version.VersionInfo('pbr'))) subparsers = parser.add_subparsers( title='commands', description='valid commands', help='additional help', dest='cmd') subparsers.required = True cmd_sha = subparsers.add_parser('sha', help='print sha of package') cmd_sha.set_defaults(func=get_sha) cmd_sha.add_argument('name', help='package to print sha of') cmd_info = subparsers.add_parser( 'info', help='print version info for package') cmd_info.set_defaults(func=get_info) cmd_info.add_argument('name', help='package to print info of') cmd_info.add_argument('-s', '--short', action="store_true", help='only display package version') cmd_freeze = subparsers.add_parser( 'freeze', help='print version info for all installed packages') cmd_freeze.set_defaults(func=freeze) args = parser.parse_args() try: args.func(args) except Exception as e: print(e) if __name__ == '__main__': sys.exit(main()) ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1673431436.0 pbr-5.11.1/pbr/core.py0000664000175000017500000001434600000000000014502 0ustar00zuulzuul00000000000000# Copyright (c) 2013 Hewlett-Packard Development Company, L.P. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. # # Copyright (C) 2013 Association of Universities for Research in Astronomy # (AURA) # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. 
Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above # copyright notice, this list of conditions and the following # disclaimer in the documentation and/or other materials provided # with the distribution. # # 3. The name of AURA and its representatives may not be used to # endorse or promote products derived from this software without # specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY AURA ``AS IS'' AND ANY EXPRESS OR IMPLIED # WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF # MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE # DISCLAIMED. IN NO EVENT SHALL AURA BE LIABLE FOR ANY DIRECT, INDIRECT, # INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, # BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS # OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND # ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR # TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE # USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH # DAMAGE. import logging import os import sys import warnings from distutils import errors from pbr import util if sys.version_info[0] == 3: string_type = str integer_types = (int,) else: string_type = basestring # noqa integer_types = (int, long) # noqa def pbr(dist, attr, value): """Implements the actual pbr setup() keyword. When used, this should be the only keyword in your setup() aside from `setup_requires`. If given as a string, the value of pbr is assumed to be the relative path to the setup.cfg file to use. Otherwise, if it evaluates to true, it simply assumes that pbr should be used, and the default 'setup.cfg' is used. This works by reading the setup.cfg file, parsing out the supported metadata and command options, and using them to rebuild the `DistributionMetadata` object and set the newly added command options. The reason for doing things this way is that a custom `Distribution` class will not play nicely with setup_requires; however, this implementation may not work well with distributions that do use a `Distribution` subclass. """ # Distribution.finalize_options() is what calls this method. That means # there is potential for recursion here. Recursion seems to be an issue # particularly when using PEP517 build-system configs without # setup_requires in setup.py. We can avoid the recursion by setting # this canary so we don't repeat ourselves. if hasattr(dist, '_pbr_initialized'): return dist._pbr_initialized = True if not value: return if isinstance(value, string_type): path = os.path.abspath(value) else: path = os.path.abspath('setup.cfg') if not os.path.exists(path): raise errors.DistutilsFileError( 'The setup.cfg file %s does not exist.' % path) # Converts the setup.cfg file to setup() arguments try: attrs = util.cfg_to_args(path, dist.script_args) except Exception: e = sys.exc_info()[1] # NB: This will output to the console if no explicit logging has # been setup - but thats fine, this is a fatal distutils error, so # being pretty isn't the #1 goal.. being diagnosable is. logging.exception('Error parsing') raise errors.DistutilsSetupError( 'Error parsing %s: %s: %s' % (path, e.__class__.__name__, e)) # There are some metadata fields that are only supported by # setuptools and not distutils, and hence are not in # dist.metadata. We are OK to write these in. 
For gory details # see # https://github.com/pypa/setuptools/pull/1343 _DISTUTILS_UNSUPPORTED_METADATA = ( 'long_description_content_type', 'project_urls', 'provides_extras' ) # Repeat some of the Distribution initialization code with the newly # provided attrs if attrs: # Skips 'options' and 'licence' support which are rarely used; may # add back in later if demanded for key, val in attrs.items(): if hasattr(dist.metadata, 'set_' + key): getattr(dist.metadata, 'set_' + key)(val) elif hasattr(dist.metadata, key): setattr(dist.metadata, key, val) elif hasattr(dist, key): setattr(dist, key, val) elif key in _DISTUTILS_UNSUPPORTED_METADATA: setattr(dist.metadata, key, val) else: msg = 'Unknown distribution option: %s' % repr(key) warnings.warn(msg) # Re-finalize the underlying Distribution try: super(dist.__class__, dist).finalize_options() except TypeError: # If dist is not declared as a new-style class (with object as # a subclass) then super() will not work on it. This is the case # for Python 2. In that case, fall back to doing this the ugly way dist.__class__.__bases__[-1].finalize_options(dist) # This bit comes out of distribute/setuptools if isinstance(dist.metadata.version, integer_types + (float,)): # Some people apparently take "version number" too literally :) dist.metadata.version = str(dist.metadata.version) ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1673431436.0 pbr-5.11.1/pbr/extra_files.py0000664000175000017500000000211000000000000016041 0ustar00zuulzuul00000000000000# Copyright (c) 2013 Hewlett-Packard Development Company, L.P. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. from distutils import errors import os _extra_files = [] def get_extra_files(): global _extra_files return _extra_files def set_extra_files(extra_files): # Let's do a sanity check for filename in extra_files: if not os.path.exists(filename): raise errors.DistutilsFileError( '%s from the extra_files option in setup.cfg does not ' 'exist' % filename) global _extra_files _extra_files[:] = extra_files[:] ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1673431436.0 pbr-5.11.1/pbr/find_package.py0000664000175000017500000000202300000000000016132 0ustar00zuulzuul00000000000000# Copyright 2013 Hewlett-Packard Development Company, L.P. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
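# Illustrative example (the layout shown is an assumption, not part of the
# original module): for a source tree containing pbr/, pbr/cmd/ and pbr/hooks/,
#
#   smart_find_packages("pbr")
#
# returns a newline-separated set such as "pbr\npbr.cmd\npbr.hooks", i.e. the
# listed package plus every sub-package found by setuptools.find_packages().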
import os import setuptools def smart_find_packages(package_list): """Run find_packages the way we intend.""" packages = [] for pkg in package_list.strip().split("\n"): pkg_path = pkg.replace('.', os.path.sep) packages.append(pkg) packages.extend(['%s.%s' % (pkg, f) for f in setuptools.find_packages(pkg_path)]) return "\n".join(set(packages)) ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1673431436.0 pbr-5.11.1/pbr/git.py0000664000175000017500000002647400000000000014342 0ustar00zuulzuul00000000000000# Copyright 2011 OpenStack Foundation # Copyright 2012-2013 Hewlett-Packard Development Company, L.P. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from __future__ import unicode_literals import distutils.errors from distutils import log import errno import io import os import re import subprocess import time import pkg_resources from pbr import options from pbr import version def _run_shell_command(cmd, throw_on_error=False, buffer=True, env=None): if buffer: out_location = subprocess.PIPE err_location = subprocess.PIPE else: out_location = None err_location = None newenv = os.environ.copy() if env: newenv.update(env) output = subprocess.Popen(cmd, stdout=out_location, stderr=err_location, env=newenv) out = output.communicate() if output.returncode and throw_on_error: raise distutils.errors.DistutilsError( "%s returned %d" % (cmd, output.returncode)) if len(out) == 0 or not out[0] or not out[0].strip(): return '' # Since we don't control the history, and forcing users to rebase arbitrary # history to fix utf8 issues is harsh, decode with replace. return out[0].strip().decode('utf-8', 'replace') def _run_git_command(cmd, git_dir, **kwargs): if not isinstance(cmd, (list, tuple)): cmd = [cmd] return _run_shell_command( ['git', '--git-dir=%s' % git_dir] + cmd, **kwargs) def _get_git_directory(): try: return _run_shell_command(['git', 'rev-parse', '--git-dir']) except OSError as e: if e.errno == errno.ENOENT: # git not installed. return '' raise def _git_is_installed(): try: # We cannot use 'which git' as it may not be available # in some distributions, So just try 'git --version' # to see if we run into trouble _run_shell_command(['git', '--version']) except OSError: return False return True def _get_highest_tag(tags): """Find the highest tag from a list. Pass in a list of tag strings and this will return the highest (latest) as sorted by the pkg_resources version parser. """ return max(tags, key=pkg_resources.parse_version) def _find_git_files(dirname='', git_dir=None): """Behave like a file finder entrypoint plugin. We don't actually use the entrypoints system for this because it runs at absurd times. We only want to do this when we are building an sdist. """ file_list = [] if git_dir is None: git_dir = _run_git_functions() if git_dir: log.info("[pbr] In git context, generating filelist from git") file_list = _run_git_command(['ls-files', '-z'], git_dir) # Users can fix utf8 issues locally with a single commit, so we are # strict here. 
file_list = file_list.split(b'\x00'.decode('utf-8')) return [f for f in file_list if f] def _get_raw_tag_info(git_dir): describe = _run_git_command(['describe', '--always'], git_dir) if "-" in describe: return describe.rsplit("-", 2)[-2] if "." in describe: return 0 return None def get_is_release(git_dir): return _get_raw_tag_info(git_dir) == 0 def _run_git_functions(): git_dir = None if _git_is_installed(): git_dir = _get_git_directory() return git_dir or None def get_git_short_sha(git_dir=None): """Return the short sha for this repo, if it exists.""" if not git_dir: git_dir = _run_git_functions() if git_dir: return _run_git_command( ['log', '-n1', '--pretty=format:%h'], git_dir) return None def _clean_changelog_message(msg): """Cleans any instances of invalid sphinx wording. This escapes/removes any instances of invalid characters that can be interpreted by sphinx as a warning or error when translating the Changelog into an HTML file for documentation building within projects. * Escapes '_' which is interpreted as a link * Escapes '*' which is interpreted as a new line * Escapes '`' which is interpreted as a literal """ msg = msg.replace('*', r'\*') msg = msg.replace('_', r'\_') msg = msg.replace('`', r'\`') return msg def _iter_changelog(changelog): """Convert a oneline log iterator to formatted strings. :param changelog: An iterator of one line log entries like that given by _iter_log_oneline. :return: An iterator over (release, formatted changelog) tuples. """ first_line = True current_release = None yield current_release, "CHANGES\n=======\n\n" for hash, tags, msg in changelog: if tags: current_release = _get_highest_tag(tags) underline = len(current_release) * '-' if not first_line: yield current_release, '\n' yield current_release, ( "%(tag)s\n%(underline)s\n\n" % dict(tag=current_release, underline=underline)) if not msg.startswith("Merge "): if msg.endswith("."): msg = msg[:-1] msg = _clean_changelog_message(msg) yield current_release, "* %(msg)s\n" % dict(msg=msg) first_line = False def _iter_log_oneline(git_dir=None): """Iterate over --oneline log entries if possible. This parses the output into a structured form but does not apply presentation logic to the output - making it suitable for different uses. :return: An iterator of (hash, tags_set, 1st_line) tuples, or None if changelog generation is disabled / not available. """ if git_dir is None: git_dir = _get_git_directory() if not git_dir: return [] return _iter_log_inner(git_dir) def _is_valid_version(candidate): try: version.SemanticVersion.from_pip_string(candidate) return True except ValueError: return False def _iter_log_inner(git_dir): """Iterate over --oneline log entries. This parses the output into a structured form but does not apply presentation logic to the output - making it suitable for different uses. .. caution:: This function may return a tag that does not actually exist in the list of git objects, because tag names are rewritten to also list pre-release suffixes compliant with the SemVer specification (e.g. 1.2.3-rc1). :return: An iterator of (hash, tags_set, 1st_line) tuples. 
""" log.info('[pbr] Generating ChangeLog') log_cmd = ['log', '--decorate=full', '--format=%h%x00%s%x00%d'] changelog = _run_git_command(log_cmd, git_dir) for line in changelog.split('\n'): line_parts = line.split('\x00') if len(line_parts) != 3: continue sha, msg, refname = line_parts tags = set() # refname can be: # # HEAD, tag: refs/tags/1.4.0, refs/remotes/origin/master, \ # refs/heads/master # refs/tags/1.3.4 if "refs/tags/" in refname: refname = refname.strip()[1:-1] # remove wrapping ()'s # If we start with "tag: refs/tags/1.2b1, tag: refs/tags/1.2" # The first split gives us "['', '1.2b1, tag:', '1.2']" # Which is why we do the second split below on the comma for tag_string in refname.split("refs/tags/")[1:]: # git tag does not allow : or " " in tag names, so we split # on ", " which is the separator between elements candidate = tag_string.split(", ")[0].replace("-", ".") if _is_valid_version(candidate): tags.add(candidate) yield sha, tags, msg def write_git_changelog(git_dir=None, dest_dir=os.path.curdir, option_dict=None, changelog=None): """Write a changelog based on the git changelog.""" start = time.time() if not option_dict: option_dict = {} should_skip = options.get_boolean_option(option_dict, 'skip_changelog', 'SKIP_WRITE_GIT_CHANGELOG') if should_skip: return if not changelog: changelog = _iter_log_oneline(git_dir=git_dir) if changelog: changelog = _iter_changelog(changelog) if not changelog: return new_changelog = os.path.join(dest_dir, 'ChangeLog') if os.path.exists(new_changelog) and not os.access(new_changelog, os.W_OK): # If there's already a ChangeLog and it's not writable, just use it log.info('[pbr] ChangeLog not written (file already' ' exists and it is not writeable)') return log.info('[pbr] Writing ChangeLog') with io.open(new_changelog, "w", encoding="utf-8") as changelog_file: for release, content in changelog: changelog_file.write(content) stop = time.time() log.info('[pbr] ChangeLog complete (%0.1fs)' % (stop - start)) def generate_authors(git_dir=None, dest_dir='.', option_dict=dict()): """Create AUTHORS file using git commits.""" should_skip = options.get_boolean_option(option_dict, 'skip_authors', 'SKIP_GENERATE_AUTHORS') if should_skip: return start = time.time() old_authors = os.path.join(dest_dir, 'AUTHORS.in') new_authors = os.path.join(dest_dir, 'AUTHORS') if os.path.exists(new_authors) and not os.access(new_authors, os.W_OK): # If there's already an AUTHORS file and it's not writable, just use it return log.info('[pbr] Generating AUTHORS') ignore_emails = '((jenkins|zuul)@review|infra@lists|jenkins@openstack)' if git_dir is None: git_dir = _get_git_directory() if git_dir: authors = [] # don't include jenkins email address in AUTHORS file git_log_cmd = ['log', '--format=%aN <%aE>'] authors += _run_git_command(git_log_cmd, git_dir).split('\n') authors = [a for a in authors if not re.search(ignore_emails, a)] # get all co-authors from commit messages co_authors_out = _run_git_command('log', git_dir) co_authors = re.findall('Co-authored-by:.+', co_authors_out, re.MULTILINE) co_authors = [signed.split(":", 1)[1].strip() for signed in co_authors if signed] authors += co_authors authors = sorted(set(authors)) with open(new_authors, 'wb') as new_authors_fh: if os.path.exists(old_authors): with open(old_authors, "rb") as old_authors_fh: new_authors_fh.write(old_authors_fh.read()) new_authors_fh.write(('\n'.join(authors) + '\n') .encode('utf-8')) stop = time.time() log.info('[pbr] AUTHORS complete (%0.1fs)' % (stop - start)) 
././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1673431461.8979604 pbr-5.11.1/pbr/hooks/0000775000175000017500000000000000000000000014313 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1673431436.0 pbr-5.11.1/pbr/hooks/__init__.py0000664000175000017500000000207600000000000016431 0ustar00zuulzuul00000000000000# Copyright 2013 Hewlett-Packard Development Company, L.P. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from pbr.hooks import backwards from pbr.hooks import commands from pbr.hooks import files from pbr.hooks import metadata def setup_hook(config): """Filter config parsed from a setup.cfg to inject our defaults.""" metadata_config = metadata.MetadataConfig(config) metadata_config.run() backwards.BackwardsCompatConfig(config).run() commands.CommandsConfig(config).run() files.FilesConfig(config, metadata_config.get_name()).run() ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1673431436.0 pbr-5.11.1/pbr/hooks/backwards.py0000664000175000017500000000223000000000000016623 0ustar00zuulzuul00000000000000# Copyright 2013 Hewlett-Packard Development Company, L.P. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from pbr.hooks import base from pbr import packaging class BackwardsCompatConfig(base.BaseConfig): section = 'backwards_compat' def hook(self): self.config['include_package_data'] = 'True' packaging.append_text_list( self.config, 'dependency_links', packaging.parse_dependency_links()) packaging.append_text_list( self.config, 'tests_require', packaging.parse_requirements( packaging.TEST_REQUIREMENTS_FILES, strip_markers=True)) ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1673431436.0 pbr-5.11.1/pbr/hooks/base.py0000664000175000017500000000201600000000000015576 0ustar00zuulzuul00000000000000# Copyright 2013 Hewlett-Packard Development Company, L.P. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the # License for the specific language governing permissions and limitations # under the License. class BaseConfig(object): section = None def __init__(self, config): self._global_config = config self.config = self._global_config.get(self.section, dict()) self.pbr_config = config.get('pbr', dict()) def run(self): self.hook() self.save() def hook(self): pass def save(self): self._global_config[self.section] = self.config ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1673431436.0 pbr-5.11.1/pbr/hooks/commands.py0000664000175000017500000000465400000000000016477 0ustar00zuulzuul00000000000000# Copyright 2013 Hewlett-Packard Development Company, L.P. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import os from setuptools.command import easy_install from pbr.hooks import base from pbr import options from pbr import packaging class CommandsConfig(base.BaseConfig): section = 'global' def __init__(self, config): super(CommandsConfig, self).__init__(config) self.commands = self.config.get('commands', "") def save(self): self.config['commands'] = self.commands super(CommandsConfig, self).save() def add_command(self, command): self.commands = "%s\n%s" % (self.commands, command) def hook(self): self.add_command('pbr.packaging.LocalEggInfo') self.add_command('pbr.packaging.LocalSDist') self.add_command('pbr.packaging.LocalInstallScripts') self.add_command('pbr.packaging.LocalDevelop') self.add_command('pbr.packaging.LocalRPMVersion') self.add_command('pbr.packaging.LocalDebVersion') if os.name != 'nt': easy_install.get_script_args = packaging.override_get_script_args if packaging.have_sphinx(): self.add_command('pbr.builddoc.LocalBuildDoc') if os.path.exists('.testr.conf') and packaging.have_testr(): # There is a .testr.conf file. We want to use it. self.add_command('pbr.packaging.TestrTest') elif self.config.get('nosetests', False) and packaging.have_nose(): # We seem to still have nose configured self.add_command('pbr.packaging.NoseTest') use_egg = options.get_boolean_option( self.pbr_config, 'use-egg', 'PBR_USE_EGG') # We always want non-egg install unless explicitly requested if 'manpages' in self.pbr_config or not use_egg: self.add_command('pbr.packaging.LocalInstall') else: self.add_command('pbr.packaging.InstallWithGit') ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1673431436.0 pbr-5.11.1/pbr/hooks/files.py0000664000175000017500000001121100000000000015763 0ustar00zuulzuul00000000000000# Copyright 2013 Hewlett-Packard Development Company, L.P. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import os import shlex import sys from pbr import find_package from pbr.hooks import base def get_manpath(): manpath = 'share/man' if os.path.exists(os.path.join(sys.prefix, 'man')): # This works around a bug with install where it expects every node # in the relative data directory to be an actual directory, since at # least Debian derivatives (and probably other platforms as well) # like to symlink Unixish /usr/local/man to /usr/local/share/man. manpath = 'man' return manpath def get_man_section(section): return os.path.join(get_manpath(), 'man%s' % section) def unquote_path(path): # unquote the full path, e.g: "'a/full/path'" becomes "a/full/path", also # strip the quotes off individual path components because os.walk cannot # handle paths like: "'i like spaces'/'another dir'", so we will pass it # "i like spaces/another dir" instead. if os.name == 'nt': # shlex cannot handle paths that contain backslashes, treating those # as escape characters. path = path.replace("\\", "/") return "".join(shlex.split(path)).replace("/", "\\") return "".join(shlex.split(path)) class FilesConfig(base.BaseConfig): section = 'files' def __init__(self, config, name): super(FilesConfig, self).__init__(config) self.name = name self.data_files = self.config.get('data_files', '') def save(self): self.config['data_files'] = self.data_files super(FilesConfig, self).save() def expand_globs(self): finished = [] for line in self.data_files.split("\n"): if line.rstrip().endswith('*') and '=' in line: (target, source_glob) = line.split('=') source_prefix = source_glob.strip()[:-1] target = target.strip() if not target.endswith(os.path.sep): target += os.path.sep unquoted_prefix = unquote_path(source_prefix) unquoted_target = unquote_path(target) for (dirpath, dirnames, fnames) in os.walk(unquoted_prefix): # As source_prefix is always matched, using replace with a # a limit of one is always going to replace the path prefix # and not accidentally replace some text in the middle of # the path new_prefix = dirpath.replace(unquoted_prefix, unquoted_target, 1) finished.append("'%s' = " % new_prefix) finished.extend( [" '%s'" % os.path.join(dirpath, f) for f in fnames]) else: finished.append(line) self.data_files = "\n".join(finished) def add_man_path(self, man_path): self.data_files = "%s\n'%s' =" % (self.data_files, man_path) def add_man_page(self, man_page): self.data_files = "%s\n '%s'" % (self.data_files, man_page) def get_man_sections(self): man_sections = dict() manpages = self.pbr_config['manpages'] for manpage in manpages.split(): section_number = manpage.strip()[-1] section = man_sections.get(section_number, list()) section.append(manpage.strip()) man_sections[section_number] = section return man_sections def hook(self): packages = self.config.get('packages', self.name).strip() expanded = [] for pkg in packages.split("\n"): if os.path.isdir(pkg.strip()): expanded.append(find_package.smart_find_packages(pkg.strip())) self.config['packages'] = "\n".join(expanded) self.expand_globs() if 'manpages' in self.pbr_config: man_sections = self.get_man_sections() for (section, pages) in man_sections.items(): manpath = 
get_man_section(section) self.add_man_path(manpath) for page in pages: self.add_man_page(page) ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1673431436.0 pbr-5.11.1/pbr/hooks/metadata.py0000664000175000017500000000206400000000000016447 0ustar00zuulzuul00000000000000# Copyright 2013 Hewlett-Packard Development Company, L.P. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from pbr.hooks import base from pbr import packaging class MetadataConfig(base.BaseConfig): section = 'metadata' def hook(self): self.config['version'] = packaging.get_version( self.config['name'], self.config.get('version', None)) packaging.append_text_list( self.config, 'requires_dist', packaging.parse_requirements()) def get_name(self): return self.config['name'] ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1673431436.0 pbr-5.11.1/pbr/options.py0000664000175000017500000000450300000000000015237 0ustar00zuulzuul00000000000000# Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. # # Copyright (C) 2013 Association of Universities for Research in Astronomy # (AURA) # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above # copyright notice, this list of conditions and the following # disclaimer in the documentation and/or other materials provided # with the distribution. # # 3. The name of AURA and its representatives may not be used to # endorse or promote products derived from this software without # specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY AURA ``AS IS'' AND ANY EXPRESS OR IMPLIED # WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF # MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE # DISCLAIMED. IN NO EVENT SHALL AURA BE LIABLE FOR ANY DIRECT, INDIRECT, # INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, # BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS # OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND # ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR # TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE # USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH # DAMAGE. 
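# Illustrative usage, mirroring callers elsewhere in pbr (e.g. pbr.git):
#
#   skip = get_boolean_option(option_dict, 'skip_changelog',
#                             'SKIP_WRITE_GIT_CHANGELOG')
#
# The result is True when either the setup.cfg option or the named environment
# variable is set to one of TRUE_VALUES ('true', '1', 'yes').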
import os TRUE_VALUES = ('true', '1', 'yes') def get_boolean_option(option_dict, option_name, env_name): return ((option_name in option_dict and option_dict[option_name][1].lower() in TRUE_VALUES) or str(os.getenv(env_name)).lower() in TRUE_VALUES) ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1673431436.0 pbr-5.11.1/pbr/packaging.py0000664000175000017500000007536700000000000015510 0ustar00zuulzuul00000000000000# Copyright 2011 OpenStack Foundation # Copyright 2012-2013 Hewlett-Packard Development Company, L.P. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """ Utilities with minimum-depends for use in setup.py """ from __future__ import unicode_literals from distutils.command import install as du_install from distutils import log # (hberaud) do not use six here to import urlparse # to keep this module free from external dependencies # to avoid cross dependencies errors on minimal system # free from dependencies. try: from urllib.parse import urlparse except ImportError: from urlparse import urlparse import email import email.errors import os import re import sys import warnings import pkg_resources import setuptools from setuptools.command import develop from setuptools.command import easy_install from setuptools.command import egg_info from setuptools.command import install from setuptools.command import install_scripts from setuptools.command import sdist from pbr import extra_files from pbr import git from pbr import options import pbr.pbr_json from pbr import testr_command from pbr import version REQUIREMENTS_FILES = ('requirements.txt', 'tools/pip-requires') PY_REQUIREMENTS_FILES = [x % sys.version_info[0] for x in ( 'requirements-py%d.txt', 'tools/pip-requires-py%d')] TEST_REQUIREMENTS_FILES = ('test-requirements.txt', 'tools/test-requires') def get_requirements_files(): files = os.environ.get("PBR_REQUIREMENTS_FILES") if files: return tuple(f.strip() for f in files.split(',')) # Returns a list composed of: # - REQUIREMENTS_FILES with -py2 or -py3 in the name # (e.g. requirements-py3.txt) # - REQUIREMENTS_FILES return PY_REQUIREMENTS_FILES + list(REQUIREMENTS_FILES) def append_text_list(config, key, text_list): """Append a \n separated list to possibly existing value.""" new_value = [] current_value = config.get(key, "") if current_value: new_value.append(current_value) new_value.extend(text_list) config[key] = '\n'.join(new_value) def _any_existing(file_list): return [f for f in file_list if os.path.exists(f)] # Get requirements from the first file that exists def get_reqs_from_files(requirements_files): existing = _any_existing(requirements_files) # TODO(stephenfin): Remove this in pbr 6.0+ deprecated = [f for f in existing if f in PY_REQUIREMENTS_FILES] if deprecated: warnings.warn('Support for \'-pyN\'-suffixed requirements files is ' 'removed in pbr 5.0 and these files are now ignored. ' 'Use environment markers instead. 
Conflicting files: ' '%r' % deprecated, DeprecationWarning) existing = [f for f in existing if f not in PY_REQUIREMENTS_FILES] for requirements_file in existing: with open(requirements_file, 'r') as fil: return fil.read().split('\n') return [] def egg_fragment(match): return re.sub(r'(?P[\w.-]+)-' r'(?P' r'(?P' r'(?P0|[1-9][0-9]*)\.' r'(?P0|[1-9][0-9]*)\.' r'(?P0|[1-9][0-9]*)){1}' r'(?P(?:\-' r'(?P(?:(?=[0]{1}[0-9A-Za-z-]{0})(?:[0]{1})|' r'(?=[1-9]{1}[0-9]*[A-Za-z]{0})(?:[0-9]+)|' r'(?=[0-9]*[A-Za-z-]+[0-9A-Za-z-]*)(?:[0-9A-Za-z-]+)){1}' r'(?:\.(?=[0]{1}[0-9A-Za-z-]{0})(?:[0]{1})|' r'\.(?=[1-9]{1}[0-9]*[A-Za-z]{0})(?:[0-9]+)|' r'\.(?=[0-9]*[A-Za-z-]+[0-9A-Za-z-]*)' r'(?:[0-9A-Za-z-]+))*){1}){0,1}(?:\+' r'(?P(?:[0-9A-Za-z-]+(?:\.[0-9A-Za-z-]+)*))){0,1}))', r'\g>=\g', match.groups()[-1]) def parse_requirements(requirements_files=None, strip_markers=False): if requirements_files is None: requirements_files = get_requirements_files() requirements = [] for line in get_reqs_from_files(requirements_files): # Ignore comments if (not line.strip()) or line.startswith('#'): continue # Ignore index URL lines if re.match(r'^\s*(-i|--index-url|--extra-index-url|--find-links).*', line): continue # Handle nested requirements files such as: # -r other-requirements.txt if line.startswith('-r'): req_file = line.partition(' ')[2] requirements += parse_requirements( [req_file], strip_markers=strip_markers) continue try: project_name = pkg_resources.Requirement.parse(line).project_name except ValueError: project_name = None # For the requirements list, we need to inject only the portion # after egg= so that distutils knows the package it's looking for # such as: # -e git://github.com/openstack/nova/master#egg=nova # -e git://github.com/openstack/nova/master#egg=nova-1.2.3 # -e git+https://foo.com/zipball#egg=bar&subdirectory=baz # http://github.com/openstack/nova/zipball/master#egg=nova # http://github.com/openstack/nova/zipball/master#egg=nova-1.2.3 # git+https://foo.com/zipball#egg=bar&subdirectory=baz # git+[ssh]://github.com/openstack/nova/zipball/master#egg=nova-1.2.3 # hg+[ssh]://github.com/openstack/nova/zipball/master#egg=nova-1.2.3 # svn+[proto]://github.com/openstack/nova/zipball/master#egg=nova-1.2.3 # -f lines are for index locations, and don't get used here if re.match(r'\s*-e\s+', line): extract = re.match(r'\s*-e\s+(.*)$', line) line = extract.group(1) egg = urlparse(line) if egg.scheme: line = re.sub(r'egg=([^&]+).*$', egg_fragment, egg.fragment) elif re.match(r'\s*-f\s+', line): line = None reason = 'Index Location' if line is not None: line = re.sub('#.*$', '', line) if strip_markers: semi_pos = line.find(';') if semi_pos < 0: semi_pos = None line = line[:semi_pos] requirements.append(line) else: log.info( '[pbr] Excluding %s: %s' % (project_name, reason)) return requirements def parse_dependency_links(requirements_files=None): if requirements_files is None: requirements_files = get_requirements_files() dependency_links = [] # dependency_links inject alternate locations to find packages listed # in requirements for line in get_reqs_from_files(requirements_files): # skip comments and blank lines if re.match(r'(\s*#)|(\s*$)', line): continue # lines with -e or -f need the whole line, minus the flag if re.match(r'\s*-[ef]\s+', line): dependency_links.append(re.sub(r'\s*-[ef]\s+', '', line)) # lines that are only urls can go in unmolested elif re.match(r'^\s*(https?|git(\+(https|ssh))?|svn|hg)\S*:', line): dependency_links.append(line) return dependency_links class InstallWithGit(install.install): 
"""Extracts ChangeLog and AUTHORS from git then installs. This is useful for e.g. readthedocs where the package is installed and then docs built. """ command_name = 'install' def run(self): _from_git(self.distribution) return install.install.run(self) class LocalInstall(install.install): """Runs python setup.py install in a sensible manner. Force a non-egg installed in the manner of single-version-externally-managed, which allows us to install manpages and config files. """ command_name = 'install' def run(self): _from_git(self.distribution) return du_install.install.run(self) class TestrTest(testr_command.Testr): """Make setup.py test do the right thing.""" command_name = 'test' description = 'DEPRECATED: Run unit tests using testr' def run(self): warnings.warn('testr integration is deprecated in pbr 4.2 and will ' 'be removed in a future release. Please call your test ' 'runner directly', DeprecationWarning) # Can't use super - base class old-style class testr_command.Testr.run(self) class LocalRPMVersion(setuptools.Command): __doc__ = """Output the rpm *compatible* version string of this package""" description = __doc__ user_options = [] command_name = "rpm_version" def run(self): log.info("[pbr] Extracting rpm version") name = self.distribution.get_name() print(version.VersionInfo(name).semantic_version().rpm_string()) def initialize_options(self): pass def finalize_options(self): pass class LocalDebVersion(setuptools.Command): __doc__ = """Output the deb *compatible* version string of this package""" description = __doc__ user_options = [] command_name = "deb_version" def run(self): log.info("[pbr] Extracting deb version") name = self.distribution.get_name() print(version.VersionInfo(name).semantic_version().debian_string()) def initialize_options(self): pass def finalize_options(self): pass def have_testr(): return testr_command.have_testr try: from nose import commands class NoseTest(commands.nosetests): """Fallback test runner if testr is a no-go.""" command_name = 'test' description = 'DEPRECATED: Run unit tests using nose' def run(self): warnings.warn('nose integration in pbr is deprecated. 
Please use ' 'the native nose setuptools configuration or call ' 'nose directly', DeprecationWarning) # Can't use super - base class old-style class commands.nosetests.run(self) _have_nose = True except ImportError: _have_nose = False def have_nose(): return _have_nose _wsgi_text = """#PBR Generated from %(group)r import threading from %(module_name)s import %(import_target)s if __name__ == "__main__": import argparse import socket import sys import wsgiref.simple_server as wss parser = argparse.ArgumentParser( description=%(import_target)s.__doc__, formatter_class=argparse.ArgumentDefaultsHelpFormatter, usage='%%(prog)s [-h] [--port PORT] [--host IP] -- [passed options]') parser.add_argument('--port', '-p', type=int, default=8000, help='TCP port to listen on') parser.add_argument('--host', '-b', default='', help='IP to bind the server to') parser.add_argument('args', nargs=argparse.REMAINDER, metavar='-- [passed options]', help="'--' is the separator of the arguments used " "to start the WSGI server and the arguments passed " "to the WSGI application.") args = parser.parse_args() if args.args: if args.args[0] == '--': args.args.pop(0) else: parser.error("unrecognized arguments: %%s" %% ' '.join(args.args)) sys.argv[1:] = args.args server = wss.make_server(args.host, args.port, %(invoke_target)s()) print("*" * 80) print("STARTING test server %(module_name)s.%(invoke_target)s") url = "http://%%s:%%d/" %% (server.server_name, server.server_port) print("Available at %%s" %% url) print("DANGER! For testing only, do not use in production") print("*" * 80) sys.stdout.flush() server.serve_forever() else: application = None app_lock = threading.Lock() with app_lock: if application is None: application = %(invoke_target)s() """ _script_text = """# PBR Generated from %(group)r import sys from %(module_name)s import %(import_target)s if __name__ == "__main__": sys.exit(%(invoke_target)s()) """ # the following allows us to specify different templates per entry # point group when generating pbr scripts. ENTRY_POINTS_MAP = { 'console_scripts': _script_text, 'gui_scripts': _script_text, 'wsgi_scripts': _wsgi_text } def generate_script(group, entry_point, header, template): """Generate the script based on the template. :param str group: The entry-point group name, e.g., "console_scripts". :param str header: The first line of the script, e.g., "!#/usr/bin/env python". :param str template: The script template. 
:returns: The templated script content :rtype: str """ if not entry_point.attrs or len(entry_point.attrs) > 2: raise ValueError("Script targets must be of the form " "'func' or 'Class.class_method'.") script_text = template % dict( group=group, module_name=entry_point.module_name, import_target=entry_point.attrs[0], invoke_target='.'.join(entry_point.attrs), ) return header + script_text def override_get_script_args( dist, executable=os.path.normpath(sys.executable)): """Override entrypoints console_script.""" # get_script_header() is deprecated since Setuptools 12.0 try: header = easy_install.ScriptWriter.get_header("", executable) except AttributeError: header = easy_install.get_script_header("", executable) for group, template in ENTRY_POINTS_MAP.items(): for name, ep in dist.get_entry_map(group).items(): yield (name, generate_script(group, ep, header, template)) class LocalDevelop(develop.develop): command_name = 'develop' def install_wrapper_scripts(self, dist): if sys.platform == 'win32': return develop.develop.install_wrapper_scripts(self, dist) if not self.exclude_scripts: for args in override_get_script_args(dist): self.write_script(*args) class LocalInstallScripts(install_scripts.install_scripts): """Intercepts console scripts entry_points.""" command_name = 'install_scripts' def _make_wsgi_scripts_only(self, dist, executable): # get_script_header() is deprecated since Setuptools 12.0 try: header = easy_install.ScriptWriter.get_header("", executable) except AttributeError: header = easy_install.get_script_header("", executable) wsgi_script_template = ENTRY_POINTS_MAP['wsgi_scripts'] for name, ep in dist.get_entry_map('wsgi_scripts').items(): content = generate_script( 'wsgi_scripts', ep, header, wsgi_script_template) self.write_script(name, content) def run(self): import distutils.command.install_scripts self.run_command("egg_info") if self.distribution.scripts: # run first to set up self.outfiles distutils.command.install_scripts.install_scripts.run(self) else: self.outfiles = [] ei_cmd = self.get_finalized_command("egg_info") dist = pkg_resources.Distribution( ei_cmd.egg_base, pkg_resources.PathMetadata(ei_cmd.egg_base, ei_cmd.egg_info), ei_cmd.egg_name, ei_cmd.egg_version, ) bs_cmd = self.get_finalized_command('build_scripts') executable = getattr( bs_cmd, 'executable', easy_install.sys_executable) if 'bdist_wheel' in self.distribution.have_run: # We're building a wheel which has no way of generating mod_wsgi # scripts for us. Let's build them. # NOTE(sigmavirus24): This needs to happen here because, as the # comment below indicates, no_ep is True when building a wheel. self._make_wsgi_scripts_only(dist, executable) if self.no_ep: # no_ep is True if we're installing into an .egg file or building # a .whl file, in those cases, we do not want to build all of the # entry-points listed for this package. 
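# Console and GUI scripts do not need to be generated here: the tool
# consuming the egg or wheel (pip / easy_install) recreates them from the
# entry_points metadata at install time, so only the wsgi_scripts group
# required the special handling above.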
return if os.name != 'nt': get_script_args = override_get_script_args else: get_script_args = easy_install.get_script_args executable = '"%s"' % executable for args in get_script_args(dist, executable): self.write_script(*args) class LocalManifestMaker(egg_info.manifest_maker): """Add any files that are in git and some standard sensible files.""" def _add_pbr_defaults(self): for template_line in [ 'include AUTHORS', 'include ChangeLog', 'exclude .gitignore', 'exclude .gitreview', 'global-exclude *.pyc' ]: self.filelist.process_template_line(template_line) def add_defaults(self): """Add all the default files to self.filelist: Extends the functionality provided by distutils to also included additional sane defaults, such as the ``AUTHORS`` and ``ChangeLog`` files generated by *pbr*. Warns if (``README`` or ``README.txt``) or ``setup.py`` are missing; everything else is optional. """ option_dict = self.distribution.get_option_dict('pbr') sdist.sdist.add_defaults(self) self.filelist.append(self.template) self.filelist.append(self.manifest) self.filelist.extend(extra_files.get_extra_files()) should_skip = options.get_boolean_option(option_dict, 'skip_git_sdist', 'SKIP_GIT_SDIST') if not should_skip: rcfiles = git._find_git_files() if rcfiles: self.filelist.extend(rcfiles) elif os.path.exists(self.manifest): self.read_manifest() ei_cmd = self.get_finalized_command('egg_info') self._add_pbr_defaults() self.filelist.include_pattern("*", prefix=ei_cmd.egg_info) class LocalEggInfo(egg_info.egg_info): """Override the egg_info command to regenerate SOURCES.txt sensibly.""" command_name = 'egg_info' def find_sources(self): """Generate SOURCES.txt only if there isn't one already. If we are in an sdist command, then we always want to update SOURCES.txt. If we are not in an sdist command, then it doesn't matter one flip, and is actually destructive. However, if we're in a git context, it's always the right thing to do to recreate SOURCES.txt """ manifest_filename = os.path.join(self.egg_info, "SOURCES.txt") if (not os.path.exists(manifest_filename) or os.path.exists('.git') or 'sdist' in sys.argv): log.info("[pbr] Processing SOURCES.txt") mm = LocalManifestMaker(self.distribution) mm.manifest = manifest_filename mm.run() self.filelist = mm.filelist else: log.info("[pbr] Reusing existing SOURCES.txt") self.filelist = egg_info.FileList() with open(manifest_filename, 'r') as fil: for entry in fil.read().split('\n'): self.filelist.append(entry) def _from_git(distribution): option_dict = distribution.get_option_dict('pbr') changelog = git._iter_log_oneline() if changelog: changelog = git._iter_changelog(changelog) git.write_git_changelog(option_dict=option_dict, changelog=changelog) git.generate_authors(option_dict=option_dict) class LocalSDist(sdist.sdist): """Builds the ChangeLog and Authors files from VC first.""" command_name = 'sdist' def checking_reno(self): """Ensure reno is installed and configured. We can't run reno-based commands if reno isn't installed/available, and don't want to if the user isn't using it. """ if hasattr(self, '_has_reno'): return self._has_reno option_dict = self.distribution.get_option_dict('pbr') should_skip = options.get_boolean_option(option_dict, 'skip_reno', 'SKIP_GENERATE_RENO') if should_skip: self._has_reno = False return False try: # versions of reno witout this module will not have the required # feature, hence the import from reno import setup_command # noqa except ImportError: log.info('[pbr] reno was not found or is too old. 
Skipping ' 'release notes') self._has_reno = False return False conf, output_file, cache_file = setup_command.load_config( self.distribution) if not os.path.exists(os.path.join(conf.reporoot, conf.notespath)): log.info('[pbr] reno does not appear to be configured. Skipping ' 'release notes') self._has_reno = False return False self._files = [output_file, cache_file] log.info('[pbr] Generating release notes') self._has_reno = True return True sub_commands = [('build_reno', checking_reno)] + sdist.sdist.sub_commands def run(self): _from_git(self.distribution) # sdist.sdist is an old style class, can't use super() sdist.sdist.run(self) def make_distribution(self): # This is included in make_distribution because setuptools doesn't use # 'get_file_list'. As such, this is the only hook point that runs after # the commands in 'sub_commands' if self.checking_reno(): self.filelist.extend(self._files) self.filelist.sort() sdist.sdist.make_distribution(self) try: from pbr import builddoc _have_sphinx = True # Import the symbols from their new home so the package API stays # compatible. LocalBuildDoc = builddoc.LocalBuildDoc except ImportError: _have_sphinx = False LocalBuildDoc = None def have_sphinx(): return _have_sphinx def _get_increment_kwargs(git_dir, tag): """Calculate the sort of semver increment needed from git history. Every commit from HEAD to tag is consider for Sem-Ver metadata lines. See the pbr docs for their syntax. :return: a dict of kwargs for passing into SemanticVersion.increment. """ result = {} if tag: version_spec = tag + "..HEAD" else: version_spec = "HEAD" # Get the raw body of the commit messages so that we don't have to # parse out any formatting whitespace and to avoid user settings on # git log output affecting out ability to have working sem ver headers. changelog = git._run_git_command(['log', '--pretty=%B', version_spec], git_dir) symbols = set() header = 'sem-ver:' for line in changelog.split("\n"): line = line.lower().strip() if not line.lower().strip().startswith(header): continue new_symbols = line[len(header):].strip().split(",") symbols.update([symbol.strip() for symbol in new_symbols]) def _handle_symbol(symbol, symbols, impact): if symbol in symbols: result[impact] = True symbols.discard(symbol) _handle_symbol('bugfix', symbols, 'patch') _handle_symbol('feature', symbols, 'minor') _handle_symbol('deprecation', symbols, 'minor') _handle_symbol('api-break', symbols, 'major') for symbol in symbols: log.info('[pbr] Unknown Sem-Ver symbol %r' % symbol) # We don't want patch in the kwargs since it is not a keyword argument - # its the default minimum increment. result.pop('patch', None) return result def _get_revno_and_last_tag(git_dir): """Return the commit data about the most recent tag. We use git-describe to find this out, but if there are no tags then we fall back to counting commits since the beginning of time. """ changelog = git._iter_log_oneline(git_dir=git_dir) row_count = 0 for row_count, (ignored, tag_set, ignored) in enumerate(changelog): version_tags = set() semver_to_tag = dict() for tag in list(tag_set): try: semver = version.SemanticVersion.from_pip_string(tag) semver_to_tag[semver] = tag version_tags.add(semver) except Exception: pass if version_tags: return semver_to_tag[max(version_tags)], row_count return "", row_count def _get_version_from_git_target(git_dir, target_version): """Calculate a version from a target version in git_dir. This is used for untagged versions only. 
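For example, a tree that is three commits past the 1.2.3 tag, with a commit message carrying a 'Sem-Ver: feature' line, would normally resolve to 1.3.0.dev3: a minor increment for the feature plus a dev counter equal to the distance from the tag.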
A new version is calculated as necessary based on git metadata - distance to tags, current hash, contents of commit messages. :param git_dir: The git directory we're working from. :param target_version: If None, the last tagged version (or 0 if there are no tags yet) is incremented as needed to produce an appropriate target version following semver rules. Otherwise target_version is used as a constraint - if semver rules would result in a newer version then an exception is raised. :return: A semver version object. """ tag, distance = _get_revno_and_last_tag(git_dir) last_semver = version.SemanticVersion.from_pip_string(tag or '0') if distance == 0: new_version = last_semver else: new_version = last_semver.increment( **_get_increment_kwargs(git_dir, tag)) if target_version is not None and new_version > target_version: raise ValueError( "git history requires a target version of %(new)s, but target " "version is %(target)s" % dict(new=new_version, target=target_version)) if distance == 0: return last_semver new_dev = new_version.to_dev(distance) if target_version is not None: target_dev = target_version.to_dev(distance) if target_dev > new_dev: return target_dev return new_dev def _get_version_from_git(pre_version=None): """Calculate a version string from git. If the revision is tagged, return that. Otherwise calculate a semantic version description of the tree. The number of revisions since the last tag is included in the dev counter in the version for untagged versions. :param pre_version: If supplied use this as the target version rather than inferring one from the last tag + commit messages. """ git_dir = git._run_git_functions() if git_dir: try: tagged = git._run_git_command( ['describe', '--exact-match'], git_dir, throw_on_error=True).replace('-', '.') target_version = version.SemanticVersion.from_pip_string(tagged) except Exception: if pre_version: # not released yet - use pre_version as the target target_version = version.SemanticVersion.from_pip_string( pre_version) else: # not released yet - just calculate from git history target_version = None result = _get_version_from_git_target(git_dir, target_version) return result.release_string() # If we don't know the version, return an empty string so at least # the downstream users of the value always have the same type of # object to work with. try: return unicode() except NameError: return '' def _get_version_from_pkg_metadata(package_name): """Get the version from package metadata if present. This looks for PKG-INFO if present (for sdists), and if not looks for METADATA (for wheels) and failing that will return None. """ pkg_metadata_filenames = ['PKG-INFO', 'METADATA'] pkg_metadata = {} for filename in pkg_metadata_filenames: try: with open(filename, 'r') as pkg_metadata_file: pkg_metadata = email.message_from_file(pkg_metadata_file) except (IOError, OSError, email.errors.MessageError): continue # Check to make sure we're in our own dir if pkg_metadata.get('Name', None) != package_name: return None return pkg_metadata.get('Version', None) def get_version(package_name, pre_version=None): """Get the version of the project. First, try getting it from PKG-INFO or METADATA, if it exists. If it does, that means we're in a distribution tarball or that install has happened. Otherwise, if there is no PKG-INFO or METADATA file, pull the version from git. We do not support setup.py version sanity in git archive tarballs, nor do we support packagers directly sucking our git repo into theirs. 
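An explicitly set PBR_VERSION (or legacy OSLO_PACKAGE_VERSION) environment variable takes precedence over both of these lookups.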
We expect that a source tarball be made from our git repo - or that if someone wants to make a source tarball from a fork of our repo with additional tags in it that they understand and desire the results of doing that. :param pre_version: The version field from setup.cfg - if set then this version will be the next release. """ version = os.environ.get( "PBR_VERSION", os.environ.get("OSLO_PACKAGE_VERSION", None)) if version: return version version = _get_version_from_pkg_metadata(package_name) if version: return version version = _get_version_from_git(pre_version) # Handle http://bugs.python.org/issue11638 # version will either be an empty unicode string or a valid # unicode version string, but either way it's unicode and needs to # be encoded. if sys.version_info[0] == 2: version = version.encode('utf-8') if version: return version raise Exception("Versioning for this project requires either an sdist" " tarball, or access to an upstream git repository." " It's also possible that there is a mismatch between" " the package name in setup.cfg and the argument given" " to pbr.version.VersionInfo. Project name {name} was" " given, but was not able to be found.".format( name=package_name)) # This is added because pbr uses pbr to install itself. That means that # any changes to the egg info writer entrypoints must be forward and # backward compatible. This maintains the pbr.packaging.write_pbr_json # path. write_pbr_json = pbr.pbr_json.write_pbr_json ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1673431436.0 pbr-5.11.1/pbr/pbr_json.py0000664000175000017500000000234200000000000015357 0ustar00zuulzuul00000000000000# Copyright 2011 OpenStack Foundation # Copyright 2012-2013 Hewlett-Packard Development Company, L.P. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import json from pbr import git def write_pbr_json(cmd, basename, filename): if not hasattr(cmd.distribution, 'pbr') or not cmd.distribution.pbr: return git_dir = git._run_git_functions() if not git_dir: return values = dict() git_version = git.get_git_short_sha(git_dir) is_release = git.get_is_release(git_dir) if git_version is not None: values['git_version'] = git_version values['is_release'] = is_release cmd.write_file('pbr', filename, json.dumps(values, sort_keys=True)) ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1673431436.0 pbr-5.11.1/pbr/sphinxext.py0000664000175000017500000000620700000000000015601 0ustar00zuulzuul00000000000000# Copyright 2018 Red Hat, Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the # License for the specific language governing permissions and limitations # under the License. import os.path from six.moves import configparser from sphinx.util import logging import pbr.version _project = None logger = logging.getLogger(__name__) def _find_setup_cfg(srcdir): """Find the 'setup.cfg' file, if it exists. This assumes we're using 'doc/source' for documentation, but also allows for single level 'doc' paths. """ # TODO(stephenfin): Are we sure that this will always exist, e.g. for # an sdist or wheel? Perhaps we should check for 'PKG-INFO' or # 'METADATA' files, a la 'pbr.packaging._get_version_from_pkg_metadata' for path in [ os.path.join(srcdir, os.pardir, 'setup.cfg'), os.path.join(srcdir, os.pardir, os.pardir, 'setup.cfg')]: if os.path.exists(path): return path return None def _get_project_name(srcdir): """Return string name of project name, or None. This extracts metadata from 'setup.cfg'. We don't rely on distutils/setuptools as we don't want to actually install the package simply to build docs. """ global _project if _project is None: parser = configparser.ConfigParser() path = _find_setup_cfg(srcdir) if not path or not parser.read(path): logger.info('Could not find a setup.cfg to extract project name ' 'from') return None try: # for project name we use the name in setup.cfg, but if the # length is longer then 32 we use summary. Otherwise thAe # menu rendering looks brolen project = parser.get('metadata', 'name') if len(project.split()) == 1 and len(project) > 32: project = parser.get('metadata', 'summary') except configparser.Error: logger.info('Could not extract project metadata from setup.cfg') return None _project = project return _project def _builder_inited(app): # TODO(stephenfin): Once Sphinx 1.8 is released, we should move the below # to a 'config-inited' handler project_name = _get_project_name(app.srcdir) try: version_info = pbr.version.VersionInfo(project_name) except Exception: version_info = None if version_info and not app.config.version and not app.config.release: app.config.version = version_info.canonical_version_string() app.config.release = version_info.version_string_with_vcs() def setup(app): app.connect('builder-inited', _builder_inited) return { 'parallel_read_safe': True, 'parallel_write_safe': True, } ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1673431436.0 pbr-5.11.1/pbr/testr_command.py0000664000175000017500000001335300000000000016406 0ustar00zuulzuul00000000000000# Copyright (c) 2013 Hewlett-Packard Development Company, L.P. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. # # Copyright (c) 2013 Testrepository Contributors # # Licensed under either the Apache License, Version 2.0 or the BSD 3-clause # license at the users choice. A copy of both licenses are available in the # project source as Apache-2.0 and BSD. You may not use this file except in # compliance with one of these two licences. 
# # Unless required by applicable law or agreed to in writing, software # distributed under these licenses is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # license you chose for the specific language governing permissions and # limitations under that license. """setuptools/distutils command to run testr via setup.py PBR will hook in the Testr class to provide "setup.py test" when .testr.conf is present in the repository (see pbr/hooks/commands.py). If we are activated but testrepository is not installed, we provide a sensible error. You can pass --coverage which will also export PYTHON='coverage run --source ' and automatically combine the coverage from each testr backend test runner after the run completes. """ from distutils import cmd import distutils.errors import logging import os import sys import warnings logger = logging.getLogger(__name__) class TestrReal(cmd.Command): description = "DEPRECATED: Run unit tests using testr" user_options = [ ('coverage', None, "Replace PYTHON with coverage and merge coverage " "from each testr worker."), ('testr-args=', 't', "Run 'testr' with these args"), ('omit=', 'o', "Files to omit from coverage calculations"), ('coverage-package-name=', None, "Use this name to select packages " "for coverage (one or more, " "comma-separated)"), ('slowest', None, "Show slowest test times after tests complete."), ('no-parallel', None, "Run testr serially"), ('log-level=', 'l', "Log level (default: info)"), ] boolean_options = ['coverage', 'slowest', 'no_parallel'] def _run_testr(self, *args): logger.debug("_run_testr called with args = %r", args) return commands.run_argv([sys.argv[0]] + list(args), sys.stdin, sys.stdout, sys.stderr) def initialize_options(self): self.testr_args = None self.coverage = None self.omit = "" self.slowest = None self.coverage_package_name = None self.no_parallel = None self.log_level = 'info' def finalize_options(self): self.log_level = getattr( logging, self.log_level.upper(), logging.INFO) logging.basicConfig(level=self.log_level) logger.debug("finalize_options called") if self.testr_args is None: self.testr_args = [] else: self.testr_args = self.testr_args.split() if self.omit: self.omit = "--omit=%s" % self.omit logger.debug("finalize_options: self.__dict__ = %r", self.__dict__) def run(self): """Set up testr repo, then run testr.""" logger.debug("run called") warnings.warn('testr integration in pbr is deprecated. 
Please use ' 'the \'testr\' setup command or call testr directly', DeprecationWarning) if not os.path.isdir(".testrepository"): self._run_testr("init") if self.coverage: self._coverage_before() if not self.no_parallel: testr_ret = self._run_testr("run", "--parallel", *self.testr_args) else: testr_ret = self._run_testr("run", *self.testr_args) if testr_ret: raise distutils.errors.DistutilsError( "testr failed (%d)" % testr_ret) if self.slowest: print("Slowest Tests") self._run_testr("slowest") if self.coverage: self._coverage_after() def _coverage_before(self): logger.debug("_coverage_before called") package = self.distribution.get_name() if package.startswith('python-'): package = package[7:] # Use this as coverage package name if self.coverage_package_name: package = self.coverage_package_name options = "--source %s --parallel-mode" % package os.environ['PYTHON'] = ("coverage run %s" % options) logger.debug("os.environ['PYTHON'] = %r", os.environ['PYTHON']) def _coverage_after(self): logger.debug("_coverage_after called") os.system("coverage combine") os.system("coverage html -d ./cover %s" % self.omit) os.system("coverage xml -o ./cover/coverage.xml %s" % self.omit) class TestrFake(cmd.Command): description = "Run unit tests using testr" user_options = [] def initialize_options(self): pass def finalize_options(self): pass def run(self): print("Install testrepository to run 'testr' command properly.") try: from testrepository import commands have_testr = True Testr = TestrReal except ImportError: have_testr = False Testr = TestrFake ././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1673431461.9019606 pbr-5.11.1/pbr/tests/0000775000175000017500000000000000000000000014332 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1673431436.0 pbr-5.11.1/pbr/tests/__init__.py0000664000175000017500000000173100000000000016445 0ustar00zuulzuul00000000000000# Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. import os import testscenarios def load_tests(loader, standard_tests, pattern): # top level directory cached on loader instance this_dir = os.path.dirname(__file__) package_tests = loader.discover(start_dir=this_dir, pattern=pattern) result = loader.suiteClass() result.addTests(testscenarios.generate_scenarios(standard_tests)) result.addTests(testscenarios.generate_scenarios(package_tests)) return result ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1673431436.0 pbr-5.11.1/pbr/tests/base.py0000664000175000017500000002134500000000000015623 0ustar00zuulzuul00000000000000# Copyright 2010-2011 OpenStack Foundation # Copyright (c) 2013 Hewlett-Packard Development Company, L.P. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. # Copyright (C) 2013 Association of Universities for Research in Astronomy # (AURA) # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above # copyright notice, this list of conditions and the following # disclaimer in the documentation and/or other materials provided # with the distribution. # # 3. The name of AURA and its representatives may not be used to # endorse or promote products derived from this software without # specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY AURA ``AS IS'' AND ANY EXPRESS OR IMPLIED # WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF # MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE # DISCLAIMED. IN NO EVENT SHALL AURA BE LIABLE FOR ANY DIRECT, INDIRECT, # INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, # BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS """Common utilities used in testing""" import os import shutil import subprocess import sys import fixtures import testresources import testtools from testtools import content from pbr import options class DiveDir(fixtures.Fixture): """Dive into given directory and return back on cleanup. :ivar path: The target directory. """ def __init__(self, path): self.path = path def setUp(self): super(DiveDir, self).setUp() self.addCleanup(os.chdir, os.getcwd()) os.chdir(self.path) class BaseTestCase(testtools.TestCase, testresources.ResourcedTestCase): def setUp(self): super(BaseTestCase, self).setUp() test_timeout = os.environ.get('OS_TEST_TIMEOUT', 30) try: test_timeout = int(test_timeout) except ValueError: # If timeout value is invalid, fail hard. print("OS_TEST_TIMEOUT set to invalid value" " defaulting to no timeout") test_timeout = 0 if test_timeout > 0: self.useFixture(fixtures.Timeout(test_timeout, gentle=True)) if os.environ.get('OS_STDOUT_CAPTURE') in options.TRUE_VALUES: stdout = self.useFixture(fixtures.StringStream('stdout')).stream self.useFixture(fixtures.MonkeyPatch('sys.stdout', stdout)) if os.environ.get('OS_STDERR_CAPTURE') in options.TRUE_VALUES: stderr = self.useFixture(fixtures.StringStream('stderr')).stream self.useFixture(fixtures.MonkeyPatch('sys.stderr', stderr)) self.log_fixture = self.useFixture( fixtures.FakeLogger('pbr')) # Older git does not have config --local, so create a temporary home # directory to permit using git config --global without stepping on # developer configuration. self.useFixture(fixtures.TempHomeDir()) self.useFixture(fixtures.NestedTempfile()) self.useFixture(fixtures.FakeLogger()) # TODO(lifeless) we should remove PBR_VERSION from the environment. # rather than setting it, because thats not representative - we need to # test non-preversioned codepaths too! 
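# Presetting PBR_VERSION means packaging.get_version() returns it
# immediately, so most tests do not depend on git history or sdist
# metadata to resolve a version.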
self.useFixture(fixtures.EnvironmentVariable('PBR_VERSION', '0.0')) self.temp_dir = self.useFixture(fixtures.TempDir()).path self.package_dir = os.path.join(self.temp_dir, 'testpackage') shutil.copytree(os.path.join(os.path.dirname(__file__), 'testpackage'), self.package_dir) self.addCleanup(os.chdir, os.getcwd()) os.chdir(self.package_dir) self.addCleanup(self._discard_testpackage) # Tests can opt into non-PBR_VERSION by setting preversioned=False as # an attribute. if not getattr(self, 'preversioned', True): self.useFixture(fixtures.EnvironmentVariable('PBR_VERSION')) setup_cfg_path = os.path.join(self.package_dir, 'setup.cfg') with open(setup_cfg_path, 'rt') as cfg: content = cfg.read() content = content.replace(u'version = 0.1.dev', u'') with open(setup_cfg_path, 'wt') as cfg: cfg.write(content) def _discard_testpackage(self): # Remove pbr.testpackage from sys.modules so that it can be freshly # re-imported by the next test for k in list(sys.modules): if (k == 'pbr_testpackage' or k.startswith('pbr_testpackage.')): del sys.modules[k] def run_pbr(self, *args, **kwargs): return self._run_cmd('pbr', args, **kwargs) def run_setup(self, *args, **kwargs): return self._run_cmd(sys.executable, ('setup.py',) + args, **kwargs) def _run_cmd(self, cmd, args=[], allow_fail=True, cwd=None): """Run a command in the root of the test working copy. Runs a command, with the given argument list, in the root of the test working copy--returns the stdout and stderr streams and the exit code from the subprocess. :param cwd: If falsy run within the test package dir, otherwise run within the named path. """ cwd = cwd or self.package_dir result = _run_cmd([cmd] + list(args), cwd=cwd) if result[2] and not allow_fail: raise Exception("Command failed retcode=%s" % result[2]) return result class CapturedSubprocess(fixtures.Fixture): """Run a process and capture its output. :attr stdout: The output (a string). :attr stderr: The standard error (a string). :attr returncode: The return code of the process. Note that stdout and stderr are decoded from the bytestrings subprocess returns using error=replace. """ def __init__(self, label, *args, **kwargs): """Create a CapturedSubprocess. :param label: A label for the subprocess in the test log. E.g. 'foo'. :param *args: The *args to pass to Popen. :param **kwargs: The **kwargs to pass to Popen. """ super(CapturedSubprocess, self).__init__() self.label = label self.args = args self.kwargs = kwargs self.kwargs['stderr'] = subprocess.PIPE self.kwargs['stdin'] = subprocess.PIPE self.kwargs['stdout'] = subprocess.PIPE def setUp(self): super(CapturedSubprocess, self).setUp() proc = subprocess.Popen(*self.args, **self.kwargs) out, err = proc.communicate() self.out = out.decode('utf-8', 'replace') self.err = err.decode('utf-8', 'replace') self.addDetail(self.label + '-stdout', content.text_content(self.out)) self.addDetail(self.label + '-stderr', content.text_content(self.err)) self.returncode = proc.returncode if proc.returncode: raise AssertionError( 'Failed process args=%r, kwargs=%r, returncode=%s' % ( self.args, self.kwargs, proc.returncode)) self.addCleanup(delattr, self, 'out') self.addCleanup(delattr, self, 'err') self.addCleanup(delattr, self, 'returncode') def _run_cmd(args, cwd): """Run the command args in cwd. :param args: The command to run e.g. ['git', 'status'] :param cwd: The directory to run the comamnd in. 
:return: ((stdout, stderr), returncode) """ print('Running %s' % ' '.join(args)) p = subprocess.Popen( args, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=cwd) streams = tuple(s.decode('latin1').strip() for s in p.communicate()) print('STDOUT:') print(streams[0]) print('STDERR:') print(streams[1]) return (streams) + (p.returncode,) def _config_git(): _run_cmd( ['git', 'config', '--global', 'user.email', 'example@example.com'], None) _run_cmd( ['git', 'config', '--global', 'user.name', 'OpenStack Developer'], None) _run_cmd( ['git', 'config', '--global', 'user.signingkey', 'example@example.com'], None) ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1673431436.0 pbr-5.11.1/pbr/tests/test_commands.py0000664000175000017500000000715600000000000017555 0ustar00zuulzuul00000000000000# Copyright (c) 2013 Hewlett-Packard Development Company, L.P. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. # # Copyright (C) 2013 Association of Universities for Research in Astronomy # (AURA) # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above # copyright notice, this list of conditions and the following # disclaimer in the documentation and/or other materials provided # with the distribution. # # 3. The name of AURA and its representatives may not be used to # endorse or promote products derived from this software without # specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY AURA ``AS IS'' AND ANY EXPRESS OR IMPLIED # WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF # MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE # DISCLAIMED. IN NO EVENT SHALL AURA BE LIABLE FOR ANY DIRECT, INDIRECT, # INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, # BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS from testtools import content from pbr.tests import base class TestCommands(base.BaseTestCase): def test_custom_build_py_command(self): """Test custom build_py command. Test that a custom subclass of the build_py command runs when listed in the commands [global] option, rather than the normal build command. 
""" stdout, stderr, return_code = self.run_setup('build_py') self.addDetail('stdout', content.text_content(stdout)) self.addDetail('stderr', content.text_content(stderr)) self.assertIn('Running custom build_py command.', stdout) self.assertEqual(0, return_code) def test_custom_deb_version_py_command(self): """Test custom deb_version command.""" stdout, stderr, return_code = self.run_setup('deb_version') self.addDetail('stdout', content.text_content(stdout)) self.addDetail('stderr', content.text_content(stderr)) self.assertIn('Extracting deb version', stdout) self.assertEqual(0, return_code) def test_custom_rpm_version_py_command(self): """Test custom rpm_version command.""" stdout, stderr, return_code = self.run_setup('rpm_version') self.addDetail('stdout', content.text_content(stdout)) self.addDetail('stderr', content.text_content(stderr)) self.assertIn('Extracting rpm version', stdout) self.assertEqual(0, return_code) def test_freeze_command(self): """Test that freeze output is sorted in a case-insensitive manner.""" stdout, stderr, return_code = self.run_pbr('freeze') self.assertEqual(0, return_code) pkgs = [] for line in stdout.split('\n'): pkgs.append(line.split('==')[0].lower()) pkgs_sort = sorted(pkgs[:]) self.assertEqual(pkgs_sort, pkgs) ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1673431436.0 pbr-5.11.1/pbr/tests/test_core.py0000664000175000017500000001277600000000000016710 0ustar00zuulzuul00000000000000# Copyright (c) 2013 Hewlett-Packard Development Company, L.P. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. # # Copyright (C) 2013 Association of Universities for Research in Astronomy # (AURA) # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above # copyright notice, this list of conditions and the following # disclaimer in the documentation and/or other materials provided # with the distribution. # # 3. The name of AURA and its representatives may not be used to # endorse or promote products derived from this software without # specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY AURA ``AS IS'' AND ANY EXPRESS OR IMPLIED # WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF # MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE # DISCLAIMED. 
IN NO EVENT SHALL AURA BE LIABLE FOR ANY DIRECT, INDIRECT, # INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, # BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS import glob import os import sys import tarfile import fixtures from pbr.tests import base class TestCore(base.BaseTestCase): cmd_names = ('pbr_test_cmd', 'pbr_test_cmd_with_class') def check_script_install(self, install_stdout): for cmd_name in self.cmd_names: install_txt = 'Installing %s script to %s' % (cmd_name, self.temp_dir) self.assertIn(install_txt, install_stdout) cmd_filename = os.path.join(self.temp_dir, cmd_name) script_txt = open(cmd_filename, 'r').read() self.assertNotIn('pkg_resources', script_txt) stdout, _, return_code = self._run_cmd(cmd_filename) self.assertIn("PBR", stdout) def test_setup_py_keywords(self): """setup.py --keywords. Test that the `./setup.py --keywords` command returns the correct value without balking. """ self.run_setup('egg_info') stdout, _, _ = self.run_setup('--keywords') assert stdout == 'packaging, distutils, setuptools' def test_setup_py_build_sphinx(self): stdout, _, return_code = self.run_setup('build_sphinx') self.assertEqual(0, return_code) def test_sdist_extra_files(self): """Test that the extra files are correctly added.""" stdout, _, return_code = self.run_setup('sdist', '--formats=gztar') # There can be only one try: tf_path = glob.glob(os.path.join('dist', '*.tar.gz'))[0] except IndexError: assert False, 'source dist not found' tf = tarfile.open(tf_path) names = ['/'.join(p.split('/')[1:]) for p in tf.getnames()] self.assertIn('extra-file.txt', names) def test_console_script_install(self): """Test that we install a non-pkg-resources console script.""" if os.name == 'nt': self.skipTest('Windows support is passthrough') stdout, _, return_code = self.run_setup( 'install_scripts', '--install-dir=%s' % self.temp_dir) self.useFixture( fixtures.EnvironmentVariable('PYTHONPATH', '.')) self.check_script_install(stdout) def test_console_script_develop(self): """Test that we develop a non-pkg-resources console script.""" if sys.version_info < (3, 0): self.skipTest( 'Fails with recent virtualenv due to ' 'https://github.com/pypa/virtualenv/issues/1638' ) if os.name == 'nt': self.skipTest('Windows support is passthrough') self.useFixture( fixtures.EnvironmentVariable( 'PYTHONPATH', ".:%s" % self.temp_dir)) stdout, _, return_code = self.run_setup( 'develop', '--install-dir=%s' % self.temp_dir) self.check_script_install(stdout) class TestGitSDist(base.BaseTestCase): def setUp(self): super(TestGitSDist, self).setUp() stdout, _, return_code = self._run_cmd('git', ('init',)) if return_code: self.skipTest("git not installed") stdout, _, return_code = self._run_cmd('git', ('add', '.')) stdout, _, return_code = self._run_cmd( 'git', ('commit', '-m', 'Turn this into a git repo')) stdout, _, return_code = self.run_setup('sdist', '--formats=gztar') def test_sdist_git_extra_files(self): """Test that extra files found in git are correctly added.""" # There can be only one tf_path = glob.glob(os.path.join('dist', '*.tar.gz'))[0] tf = tarfile.open(tf_path) names = ['/'.join(p.split('/')[1:]) for p in tf.getnames()] self.assertIn('git-extra-file.txt', names) ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1673431436.0 pbr-5.11.1/pbr/tests/test_files.py0000664000175000017500000001253100000000000017047 0ustar00zuulzuul00000000000000# Copyright (c) 2013 Hewlett-Packard Development Company, L.P. # All Rights Reserved. 
# # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from __future__ import print_function import os import fixtures from pbr.hooks import files from pbr.tests import base class FilesConfigTest(base.BaseTestCase): def setUp(self): super(FilesConfigTest, self).setUp() pkg_fixture = fixtures.PythonPackage( "fake_package", [ ("fake_module.py", b""), ("other_fake_module.py", b""), ]) self.useFixture(pkg_fixture) pkg_etc = os.path.join(pkg_fixture.base, 'etc') pkg_ansible = os.path.join(pkg_fixture.base, 'ansible', 'kolla-ansible', 'test') dir_spcs = os.path.join(pkg_fixture.base, 'dir with space') dir_subdir_spc = os.path.join(pkg_fixture.base, 'multi space', 'more spaces') pkg_sub = os.path.join(pkg_etc, 'sub') subpackage = os.path.join( pkg_fixture.base, 'fake_package', 'subpackage') os.makedirs(pkg_sub) os.makedirs(subpackage) os.makedirs(pkg_ansible) os.makedirs(dir_spcs) os.makedirs(dir_subdir_spc) with open(os.path.join(pkg_etc, "foo"), 'w') as foo_file: foo_file.write("Foo Data") with open(os.path.join(pkg_sub, "bar"), 'w') as foo_file: foo_file.write("Bar Data") with open(os.path.join(pkg_ansible, "baz"), 'w') as baz_file: baz_file.write("Baz Data") with open(os.path.join(subpackage, "__init__.py"), 'w') as foo_file: foo_file.write("# empty") with open(os.path.join(dir_spcs, "file with spc"), 'w') as spc_file: spc_file.write("# empty") with open(os.path.join(dir_subdir_spc, "file with spc"), 'w') as file_: file_.write("# empty") self.useFixture(base.DiveDir(pkg_fixture.base)) def test_implicit_auto_package(self): config = dict( files=dict( ) ) files.FilesConfig(config, 'fake_package').run() self.assertIn('subpackage', config['files']['packages']) def test_auto_package(self): config = dict( files=dict( packages='fake_package', ) ) files.FilesConfig(config, 'fake_package').run() self.assertIn('subpackage', config['files']['packages']) def test_data_files_globbing(self): config = dict( files=dict( data_files="\n etc/pbr = etc/*" ) ) files.FilesConfig(config, 'fake_package').run() self.assertIn( "\n'etc/pbr/' = \n 'etc/foo'\n'etc/pbr/sub' = \n 'etc/sub/bar'", config['files']['data_files']) def test_data_files_with_spaces(self): config = dict( files=dict( data_files="\n 'i like spaces' = 'dir with space'/*" ) ) files.FilesConfig(config, 'fake_package').run() self.assertIn( "\n'i like spaces/' = \n 'dir with space/file with spc'", config['files']['data_files']) def test_data_files_with_spaces_subdirectories(self): # test that we can handle whitespace in subdirectories data_files = "\n 'one space/two space' = 'multi space/more spaces'/*" expected = ( "\n'one space/two space/' = " "\n 'multi space/more spaces/file with spc'") config = dict( files=dict( data_files=data_files ) ) files.FilesConfig(config, 'fake_package').run() self.assertIn(expected, config['files']['data_files']) def test_data_files_with_spaces_quoted_components(self): # test that we can quote individual path components data_files = ( "\n'one space'/'two space' = 'multi space'/'more spaces'/*" ) expected = ("\n'one space/two space/' = " "\n 'multi 
space/more spaces/file with spc'") config = dict( files=dict( data_files=data_files ) ) files.FilesConfig(config, 'fake_package').run() self.assertIn(expected, config['files']['data_files']) def test_data_files_globbing_source_prefix_in_directory_name(self): # We want to test that the string, "docs", is not replaced in a # subdirectory name, "sub-docs" config = dict( files=dict( data_files="\n share/ansible = ansible/*" ) ) files.FilesConfig(config, 'fake_package').run() self.assertIn( "\n'share/ansible/' = " "\n'share/ansible/kolla-ansible' = " "\n'share/ansible/kolla-ansible/test' = " "\n 'ansible/kolla-ansible/test/baz'", config['files']['data_files']) ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1673431436.0 pbr-5.11.1/pbr/tests/test_hooks.py0000664000175000017500000000567700000000000017105 0ustar00zuulzuul00000000000000# Copyright (c) 2013 Hewlett-Packard Development Company, L.P. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. # # Copyright (C) 2013 Association of Universities for Research in Astronomy # (AURA) # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above # copyright notice, this list of conditions and the following # disclaimer in the documentation and/or other materials provided # with the distribution. # # 3. The name of AURA and its representatives may not be used to # endorse or promote products derived from this software without # specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY AURA ``AS IS'' AND ANY EXPRESS OR IMPLIED # WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF # MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE # DISCLAIMED. IN NO EVENT SHALL AURA BE LIABLE FOR ANY DIRECT, INDIRECT, # INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, # BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS import os from testtools import matchers from testtools import skipUnless from pbr import testr_command from pbr.tests import base from pbr.tests import util class TestHooks(base.BaseTestCase): def setUp(self): super(TestHooks, self).setUp() with util.open_config( os.path.join(self.package_dir, 'setup.cfg')) as cfg: cfg.set('global', 'setup-hooks', 'pbr_testpackage._setup_hooks.test_hook_1\n' 'pbr_testpackage._setup_hooks.test_hook_2') def test_global_setup_hooks(self): """Test setup_hooks. Test that setup_hooks listed in the [global] section of setup.cfg are executed in order. 
""" stdout, _, return_code = self.run_setup('egg_info') assert 'test_hook_1\ntest_hook_2' in stdout assert return_code == 0 @skipUnless(testr_command.have_testr, "testrepository not available") def test_custom_commands_known(self): stdout, _, return_code = self.run_setup('--help-commands') self.assertFalse(return_code) self.assertThat(stdout, matchers.Contains(" testr ")) ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1673431436.0 pbr-5.11.1/pbr/tests/test_integration.py0000664000175000017500000002710400000000000020272 0ustar00zuulzuul00000000000000# Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. try: import configparser except ImportError: import ConfigParser as configparser import os.path import pkg_resources import shlex import sys import fixtures import testtools import textwrap from pbr.tests import base from pbr.tests import test_packaging PIPFLAGS = shlex.split(os.environ.get('PIPFLAGS', '')) PIPVERSION = os.environ.get('PIPVERSION', 'pip') PBRVERSION = os.environ.get('PBRVERSION', 'pbr') REPODIR = os.environ.get('REPODIR', '') WHEELHOUSE = os.environ.get('WHEELHOUSE', '') PIP_CMD = ['-m', 'pip'] + PIPFLAGS + ['install', '-f', WHEELHOUSE] PROJECTS = shlex.split(os.environ.get('PROJECTS', '')) PBR_ROOT = os.path.abspath(os.path.join(__file__, '..', '..', '..')) def all_projects(): if not REPODIR: return # Future: make this path parameterisable. excludes = set(['tempest', 'requirements']) for name in PROJECTS: name = name.strip() short_name = name.split('/')[-1] try: with open(os.path.join( REPODIR, short_name, 'setup.py'), 'rt') as f: if 'pbr' not in f.read(): continue except IOError: continue if short_name in excludes: continue yield (short_name, dict(name=name, short_name=short_name)) class TestIntegration(base.BaseTestCase): scenarios = list(all_projects()) def setUp(self): # Integration tests need a higher default - big repos can be slow to # clone, particularly under guest load. env = fixtures.EnvironmentVariable( 'OS_TEST_TIMEOUT', os.environ.get('OS_TEST_TIMEOUT', '600')) with env: super(TestIntegration, self).setUp() base._config_git() @testtools.skipUnless( os.environ.get('PBR_INTEGRATION', None) == '1', 'integration tests not enabled') def test_integration(self): # Test that we can: # - run sdist from the repo in a venv # - install the resulting tarball in a new venv # - pip install the repo # - pip install -e the repo # We don't break these into separate tests because we'd need separate # source dirs to isolate from side effects of running pip, and the # overheads of setup would start to beat the benefits of parallelism. 
path = os.path.join(REPODIR, self.short_name) setup_cfg = os.path.join(path, 'setup.cfg') project_name = pkg_resources.safe_name(self.short_name).lower() # These projects should all have setup.cfg files but we'll be careful if os.path.exists(setup_cfg): config = configparser.ConfigParser() config.read(setup_cfg) if config.has_section('metadata'): raw_name = config.get('metadata', 'name', fallback='notapackagename') # Technically we should really only need to use the raw # name because all our projects should be good and use # normalized names but they don't... project_name = pkg_resources.safe_name(raw_name).lower() constraints = os.path.join(REPODIR, 'requirements', 'upper-constraints.txt') tmp_constraints = os.path.join( self.useFixture(fixtures.TempDir()).path, 'upper-constraints.txt') # We need to filter out the package we are installing to avoid # conflicts with the constraints. with open(constraints, 'r') as src: with open(tmp_constraints, 'w') as dest: for line in src: constraint = line.split('===')[0] if project_name != constraint: dest.write(line) pip_cmd = PIP_CMD + ['-c', tmp_constraints] venv = self.useFixture( test_packaging.Venv('sdist', modules=['pip', 'wheel', PBRVERSION], pip_cmd=PIP_CMD)) python = venv.python self.useFixture(base.CapturedSubprocess( 'sdist', [python, 'setup.py', 'sdist'], cwd=path)) venv = self.useFixture( test_packaging.Venv('tarball', modules=['pip', 'wheel', PBRVERSION], pip_cmd=PIP_CMD)) python = venv.python filename = os.path.join( path, 'dist', os.listdir(os.path.join(path, 'dist'))[0]) self.useFixture(base.CapturedSubprocess( 'tarball', [python] + pip_cmd + [filename])) venv = self.useFixture( test_packaging.Venv('install-git', modules=['pip', 'wheel', PBRVERSION], pip_cmd=PIP_CMD)) root = venv.path python = venv.python self.useFixture(base.CapturedSubprocess( 'install-git', [python] + pip_cmd + ['git+file://' + path])) if self.short_name == 'nova': found = False for _, _, filenames in os.walk(root): if 'migrate.cfg' in filenames: found = True self.assertTrue(found) venv = self.useFixture( test_packaging.Venv('install-e', modules=['pip', 'wheel', PBRVERSION], pip_cmd=PIP_CMD)) root = venv.path python = venv.python self.useFixture(base.CapturedSubprocess( 'install-e', [python] + pip_cmd + ['-e', path])) class TestInstallWithoutPbr(base.BaseTestCase): @testtools.skipUnless( os.environ.get('PBR_INTEGRATION', None) == '1', 'integration tests not enabled') def test_install_without_pbr(self): # Test easy-install of a thing that depends on a thing using pbr tempdir = self.useFixture(fixtures.TempDir()).path # A directory containing sdists of the things we're going to depend on # in using-package. 
dist_dir = os.path.join(tempdir, 'distdir') os.mkdir(dist_dir) self._run_cmd(sys.executable, ('setup.py', 'sdist', '-d', dist_dir), allow_fail=False, cwd=PBR_ROOT) # testpkg - this requires a pbr-using package test_pkg_dir = os.path.join(tempdir, 'testpkg') os.mkdir(test_pkg_dir) pkgs = { 'pkgTest': { 'setup.py': textwrap.dedent("""\ #!/usr/bin/env python import setuptools setuptools.setup( name = 'pkgTest', tests_require = ['pkgReq'], test_suite='pkgReq' ) """), 'setup.cfg': textwrap.dedent("""\ [easy_install] find_links = %s """ % dist_dir)}, 'pkgReq': { 'requirements.txt': textwrap.dedent("""\ pbr """), 'pkgReq/__init__.py': textwrap.dedent("""\ print("FakeTest loaded and ran") """)}, } pkg_dirs = self.useFixture( test_packaging.CreatePackages(pkgs)).package_dirs test_pkg_dir = pkg_dirs['pkgTest'] req_pkg_dir = pkg_dirs['pkgReq'] self._run_cmd(sys.executable, ('setup.py', 'sdist', '-d', dist_dir), allow_fail=False, cwd=req_pkg_dir) # A venv to test within venv = self.useFixture(test_packaging.Venv('nopbr', ['pip', 'wheel'])) python = venv.python # Run the depending script self.useFixture(base.CapturedSubprocess( 'nopbr', [python] + ['setup.py', 'test'], cwd=test_pkg_dir)) class TestMarkersPip(base.BaseTestCase): scenarios = [ ('pip-latest', {'modules': ['pip']}), ( 'setuptools-Bullseye', {'modules': ['pip==20.3.4', 'setuptools==52.0.0']}, ), ( 'setuptools-Focal', {'modules': ['pip==20.0.2', 'setuptools==45.2.0']}, ), ( 'setuptools-Jammy', {'modules': ['pip==22.0.2', 'setuptools==59.6.0']}, ), ] @testtools.skipUnless( os.environ.get('PBR_INTEGRATION', None) == '1', 'integration tests not enabled', ) def test_pip_versions(self): pkgs = { 'test_markers': {'requirements.txt': textwrap.dedent("""\ pkg_a; python_version=='1.2' pkg_b; python_version!='1.2' """)}, 'pkg_a': {}, 'pkg_b': {}, } pkg_dirs = self.useFixture( test_packaging.CreatePackages(pkgs)).package_dirs temp_dir = self.useFixture(fixtures.TempDir()).path repo_dir = os.path.join(temp_dir, 'repo') venv = self.useFixture(test_packaging.Venv('markers')) bin_python = venv.python os.mkdir(repo_dir) for module in self.modules: self._run_cmd( bin_python, ['-m', 'pip', 'install', '--upgrade', module], cwd=venv.path, allow_fail=False) for pkg in pkg_dirs: self._run_cmd( bin_python, ['setup.py', 'sdist', '-d', repo_dir], cwd=pkg_dirs[pkg], allow_fail=False) self._run_cmd( bin_python, ['-m', 'pip', 'install', '--no-index', '-f', repo_dir, 'test_markers'], cwd=venv.path, allow_fail=False) self.assertIn('pkg-b', self._run_cmd( bin_python, ['-m', 'pip', 'freeze'], cwd=venv.path, allow_fail=False)[0]) # Handle collections.abc moves in python breaking old pip # These versions come from the versions installed from the 'virtualenv' # command from the 'python-virtualenv' package. 
if sys.version_info[0:3] < (3, 10, 0): lts_scenarios = [ ('Bionic', {'modules': ['pip==9.0.1', 'setuptools==39.0.1']}), ('Stretch', {'modules': ['pip==9.0.1', 'setuptools==33.1.1']}), ('EL8', {'modules': ['pip==9.0.3', 'setuptools==39.2.0']}), ('Buster', {'modules': ['pip==18.1', 'setuptools==40.8.0']}), ('Focal', {'modules': ['pip==20.0.2', 'setuptools==45.2.0']}), ] else: lts_scenarios = [ ('Bullseye', {'modules': ['pip==20.3.4', 'setuptools==52.0.0']}), ('Focal', {'modules': ['pip==20.0.2', 'setuptools==45.2.0']}), ('Jammy', {'modules': ['pip==22.0.2', 'setuptools==59.6.0']}), ] class TestLTSSupport(base.BaseTestCase): scenarios = lts_scenarios @testtools.skipUnless( os.environ.get('PBR_INTEGRATION', None) == '1', 'integration tests not enabled', ) def test_lts_venv_default_versions(self): venv = self.useFixture( test_packaging.Venv('setuptools', modules=self.modules)) bin_python = venv.python pbr = 'file://%s#egg=pbr' % PBR_ROOT # Installing PBR is a reasonable indication that we are not broken on # this particular combination of setuptools and pip. self._run_cmd(bin_python, ['-m', 'pip', 'install', pbr], cwd=venv.path, allow_fail=False) ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1673431436.0 pbr-5.11.1/pbr/tests/test_packaging.py0000664000175000017500000014376700000000000017711 0ustar00zuulzuul00000000000000# Copyright (c) 2013 New Dream Network, LLC (DreamHost) # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. # # Copyright (C) 2013 Association of Universities for Research in Astronomy # (AURA) # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above # copyright notice, this list of conditions and the following # disclaimer in the documentation and/or other materials provided # with the distribution. # # 3. The name of AURA and its representatives may not be used to # endorse or promote products derived from this software without # specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY AURA ``AS IS'' AND ANY EXPRESS OR IMPLIED # WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF # MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE # DISCLAIMED. 
IN NO EVENT SHALL AURA BE LIABLE FOR ANY DIRECT, INDIRECT, # INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, # BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS import email import email.errors import imp import os import re import sysconfig import tempfile import textwrap import fixtures try: from unittest import mock except ImportError: import mock import pkg_resources import six import testscenarios import testtools from testtools import matchers import virtualenv from wheel import wheelfile from pbr import git from pbr import packaging from pbr.tests import base PBR_ROOT = os.path.abspath(os.path.join(__file__, '..', '..', '..')) class TestRepo(fixtures.Fixture): """A git repo for testing with. Use of TempHomeDir with this fixture is strongly recommended as due to the lack of config --local in older gits, it will write to the users global configuration without TempHomeDir. """ def __init__(self, basedir): super(TestRepo, self).__init__() self._basedir = basedir def setUp(self): super(TestRepo, self).setUp() base._run_cmd(['git', 'init', '.'], self._basedir) base._config_git() base._run_cmd(['git', 'add', '.'], self._basedir) def commit(self, message_content='test commit'): files = len(os.listdir(self._basedir)) path = self._basedir + '/%d' % files open(path, 'wt').close() base._run_cmd(['git', 'add', path], self._basedir) base._run_cmd(['git', 'commit', '-m', message_content], self._basedir) def uncommit(self): base._run_cmd(['git', 'reset', '--hard', 'HEAD^'], self._basedir) def tag(self, version): base._run_cmd( ['git', 'tag', '-sm', 'test tag', version], self._basedir) class GPGKeyFixture(fixtures.Fixture): """Creates a GPG key for testing. It's recommended that this be used in concert with a unique home directory. """ def setUp(self): super(GPGKeyFixture, self).setUp() tempdir = self.useFixture(fixtures.TempDir()) gnupg_version_re = re.compile(r'^gpg\s.*\s([\d+])\.([\d+])\.([\d+])') gnupg_version = base._run_cmd(['gpg', '--version'], tempdir.path) for line in gnupg_version[0].split('\n'): gnupg_version = gnupg_version_re.match(line) if gnupg_version: gnupg_version = (int(gnupg_version.group(1)), int(gnupg_version.group(2)), int(gnupg_version.group(3))) break else: if gnupg_version is None: gnupg_version = (0, 0, 0) config_file = os.path.join(tempdir.path, 'key-config') with open(config_file, 'wt') as f: if gnupg_version[0] == 2 and gnupg_version[1] >= 1: f.write(""" %no-protection %transient-key """) f.write(""" %no-ask-passphrase Key-Type: RSA Name-Real: Example Key Name-Comment: N/A Name-Email: example@example.com Expire-Date: 2d %commit """) # Note that --quick-random (--debug-quick-random in GnuPG 2.x) # does not have a corresponding preferences file setting and # must be passed explicitly on the command line instead if gnupg_version[0] == 1: gnupg_random = '--quick-random' elif gnupg_version[0] >= 2: gnupg_random = '--debug-quick-random' else: gnupg_random = '' base._run_cmd( ['gpg', '--gen-key', '--batch', gnupg_random, config_file], tempdir.path) class Venv(fixtures.Fixture): """Create a virtual environment for testing with. :attr path: The path to the environment root. :attr python: The path to the python binary in the environment. """ def __init__(self, reason, modules=(), pip_cmd=None): """Create a Venv fixture. :param reason: A human readable string to bake into the venv file path to aid diagnostics in the case of failures. :param modules: A list of modules to install, defaults to latest pip, wheel, and the working copy of PBR. 
:attr pip_cmd: A list to override the default pip_cmd passed to python for installing base packages. """ self._reason = reason if modules == (): modules = ['pip', 'wheel', 'build', PBR_ROOT] self.modules = modules if pip_cmd is None: self.pip_cmd = ['-m', 'pip', '-v', 'install'] else: self.pip_cmd = pip_cmd def _setUp(self): path = self.useFixture(fixtures.TempDir()).path virtualenv.cli_run([path]) python = os.path.join(path, 'bin', 'python') command = [python] + self.pip_cmd + ['-U'] if self.modules and len(self.modules) > 0: command.extend(self.modules) self.useFixture(base.CapturedSubprocess( 'mkvenv-' + self._reason, command)) self.addCleanup(delattr, self, 'path') self.addCleanup(delattr, self, 'python') self.path = path self.python = python return path, python class CreatePackages(fixtures.Fixture): """Creates packages from dict with defaults :param package_dirs: A dict of package name to directory strings {'pkg_a': '/tmp/path/to/tmp/pkg_a', 'pkg_b': '/tmp/path/to/tmp/pkg_b'} """ defaults = { 'setup.py': textwrap.dedent(six.u("""\ #!/usr/bin/env python import setuptools setuptools.setup( setup_requires=['pbr'], pbr=True, ) """)), 'setup.cfg': textwrap.dedent(six.u("""\ [metadata] name = {pkg_name} """)) } def __init__(self, packages): """Creates packages from dict with defaults :param packages: a dict where the keys are the package name and a value that is a second dict that may be empty, containing keys of filenames and a string value of the contents. {'package-a': {'requirements.txt': 'string', 'setup.cfg': 'string'} """ self.packages = packages def _writeFile(self, directory, file_name, contents): path = os.path.abspath(os.path.join(directory, file_name)) path_dir = os.path.dirname(path) if not os.path.exists(path_dir): if path_dir.startswith(directory): os.makedirs(path_dir) else: raise ValueError with open(path, 'wt') as f: f.write(contents) def _setUp(self): tmpdir = self.useFixture(fixtures.TempDir()).path package_dirs = {} for pkg_name in self.packages: pkg_path = os.path.join(tmpdir, pkg_name) package_dirs[pkg_name] = pkg_path os.mkdir(pkg_path) for cf in ['setup.py', 'setup.cfg']: if cf in self.packages[pkg_name]: contents = self.packages[pkg_name].pop(cf) else: contents = self.defaults[cf].format(pkg_name=pkg_name) self._writeFile(pkg_path, cf, contents) for cf in self.packages[pkg_name]: self._writeFile(pkg_path, cf, self.packages[pkg_name][cf]) self.useFixture(TestRepo(pkg_path)).commit() self.addCleanup(delattr, self, 'package_dirs') self.package_dirs = package_dirs return package_dirs class TestPackagingInGitRepoWithCommit(base.BaseTestCase): scenarios = [ ('preversioned', dict(preversioned=True)), ('postversioned', dict(preversioned=False)), ] def setUp(self): super(TestPackagingInGitRepoWithCommit, self).setUp() self.repo = self.useFixture(TestRepo(self.package_dir)) self.repo.commit() def test_authors(self): self.run_setup('sdist', allow_fail=False) # One commit, something should be in the authors list with open(os.path.join(self.package_dir, 'AUTHORS'), 'r') as f: body = f.read() self.assertNotEqual(body, '') def test_changelog(self): self.run_setup('sdist', allow_fail=False) with open(os.path.join(self.package_dir, 'ChangeLog'), 'r') as f: body = f.read() # One commit, something should be in the ChangeLog list self.assertNotEqual(body, '') def test_changelog_handles_astrisk(self): self.repo.commit(message_content="Allow *.openstack.org to work") self.run_setup('sdist', allow_fail=False) with open(os.path.join(self.package_dir, 'ChangeLog'), 'r') as f: body = 
f.read() self.assertIn(r'\*', body) def test_changelog_handles_dead_links_in_commit(self): self.repo.commit(message_content="See os_ for to_do about qemu_.") self.run_setup('sdist', allow_fail=False) with open(os.path.join(self.package_dir, 'ChangeLog'), 'r') as f: body = f.read() self.assertIn(r'os\_', body) self.assertIn(r'to\_do', body) self.assertIn(r'qemu\_', body) def test_changelog_handles_backticks(self): self.repo.commit(message_content="Allow `openstack.org` to `work") self.run_setup('sdist', allow_fail=False) with open(os.path.join(self.package_dir, 'ChangeLog'), 'r') as f: body = f.read() self.assertIn(r'\`', body) def test_manifest_exclude_honoured(self): self.run_setup('sdist', allow_fail=False) with open(os.path.join( self.package_dir, 'pbr_testpackage.egg-info/SOURCES.txt'), 'r') as f: body = f.read() self.assertThat( body, matchers.Not(matchers.Contains('pbr_testpackage/extra.py'))) self.assertThat(body, matchers.Contains('pbr_testpackage/__init__.py')) def test_install_writes_changelog(self): stdout, _, _ = self.run_setup( 'install', '--root', self.temp_dir + 'installed', allow_fail=False) self.expectThat(stdout, matchers.Contains('Generating ChangeLog')) class TestExtrafileInstallation(base.BaseTestCase): def test_install_glob(self): stdout, _, _ = self.run_setup( 'install', '--root', self.temp_dir + 'installed', allow_fail=False) self.expectThat( stdout, matchers.Contains('copying data_files/a.txt')) self.expectThat( stdout, matchers.Contains('copying data_files/b.txt')) class TestPackagingInGitRepoWithoutCommit(base.BaseTestCase): def setUp(self): super(TestPackagingInGitRepoWithoutCommit, self).setUp() self.useFixture(TestRepo(self.package_dir)) self.run_setup('sdist', allow_fail=False) def test_authors(self): # No commits, no authors in list with open(os.path.join(self.package_dir, 'AUTHORS'), 'r') as f: body = f.read() self.assertEqual('\n', body) def test_changelog(self): # No commits, nothing should be in the ChangeLog list with open(os.path.join(self.package_dir, 'ChangeLog'), 'r') as f: body = f.read() self.assertEqual('CHANGES\n=======\n\n', body) class TestPackagingWheels(base.BaseTestCase): def setUp(self): super(TestPackagingWheels, self).setUp() self.useFixture(TestRepo(self.package_dir)) # Build the wheel self.run_setup('bdist_wheel', allow_fail=False) # Slowly construct the path to the generated whl dist_dir = os.path.join(self.package_dir, 'dist') relative_wheel_filename = os.listdir(dist_dir)[0] absolute_wheel_filename = os.path.join( dist_dir, relative_wheel_filename) wheel_file = wheelfile.WheelFile(absolute_wheel_filename) wheel_name = wheel_file.parsed_filename.group('namever') # Create a directory path to unpack the wheel to self.extracted_wheel_dir = os.path.join(dist_dir, wheel_name) # Extract the wheel contents to the directory we just created wheel_file.extractall(self.extracted_wheel_dir) wheel_file.close() def test_metadata_directory_has_pbr_json(self): # Build the path to the scripts directory pbr_json = os.path.join( self.extracted_wheel_dir, 'pbr_testpackage-0.0.dist-info/pbr.json') self.assertTrue(os.path.exists(pbr_json)) def test_data_directory_has_wsgi_scripts(self): # Build the path to the scripts directory scripts_dir = os.path.join( self.extracted_wheel_dir, 'pbr_testpackage-0.0.data/scripts') self.assertTrue(os.path.exists(scripts_dir)) scripts = os.listdir(scripts_dir) self.assertIn('pbr_test_wsgi', scripts) self.assertIn('pbr_test_wsgi_with_class', scripts) self.assertNotIn('pbr_test_cmd', scripts) 
self.assertNotIn('pbr_test_cmd_with_class', scripts) def test_generates_c_extensions(self): built_package_dir = os.path.join( self.extracted_wheel_dir, 'pbr_testpackage') static_object_filename = 'testext.so' soabi = get_soabi() if soabi: static_object_filename = 'testext.{0}.so'.format(soabi) static_object_path = os.path.join( built_package_dir, static_object_filename) self.assertTrue(os.path.exists(built_package_dir)) self.assertTrue(os.path.exists(static_object_path)) class TestPackagingHelpers(testtools.TestCase): def test_generate_script(self): group = 'console_scripts' entry_point = pkg_resources.EntryPoint( name='test-ep', module_name='pbr.packaging', attrs=('LocalInstallScripts',)) header = '#!/usr/bin/env fake-header\n' template = ('%(group)s %(module_name)s %(import_target)s ' '%(invoke_target)s') generated_script = packaging.generate_script( group, entry_point, header, template) expected_script = ( '#!/usr/bin/env fake-header\nconsole_scripts pbr.packaging ' 'LocalInstallScripts LocalInstallScripts' ) self.assertEqual(expected_script, generated_script) def test_generate_script_validates_expectations(self): group = 'console_scripts' entry_point = pkg_resources.EntryPoint( name='test-ep', module_name='pbr.packaging') header = '#!/usr/bin/env fake-header\n' template = ('%(group)s %(module_name)s %(import_target)s ' '%(invoke_target)s') self.assertRaises( ValueError, packaging.generate_script, group, entry_point, header, template) entry_point = pkg_resources.EntryPoint( name='test-ep', module_name='pbr.packaging', attrs=('attr1', 'attr2', 'attr3')) self.assertRaises( ValueError, packaging.generate_script, group, entry_point, header, template) class TestPackagingInPlainDirectory(base.BaseTestCase): def setUp(self): super(TestPackagingInPlainDirectory, self).setUp() def test_authors(self): self.run_setup('sdist', allow_fail=False) # Not a git repo, no AUTHORS file created filename = os.path.join(self.package_dir, 'AUTHORS') self.assertFalse(os.path.exists(filename)) def test_changelog(self): self.run_setup('sdist', allow_fail=False) # Not a git repo, no ChangeLog created filename = os.path.join(self.package_dir, 'ChangeLog') self.assertFalse(os.path.exists(filename)) def test_install_no_ChangeLog(self): stdout, _, _ = self.run_setup( 'install', '--root', self.temp_dir + 'installed', allow_fail=False) self.expectThat( stdout, matchers.Not(matchers.Contains('Generating ChangeLog'))) class TestPresenceOfGit(base.BaseTestCase): def testGitIsInstalled(self): with mock.patch.object(git, '_run_shell_command') as _command: _command.return_value = 'git version 1.8.4.1' self.assertEqual(True, git._git_is_installed()) def testGitIsNotInstalled(self): with mock.patch.object(git, '_run_shell_command') as _command: _command.side_effect = OSError self.assertEqual(False, git._git_is_installed()) class ParseRequirementsTest(base.BaseTestCase): def test_empty_requirements(self): actual = packaging.parse_requirements([]) self.assertEqual([], actual) def test_default_requirements(self): """Ensure default files used if no files provided.""" tempdir = tempfile.mkdtemp() requirements = os.path.join(tempdir, 'requirements.txt') with open(requirements, 'w') as f: f.write('pbr') # the defaults are relative to where pbr is called from so we need to # override them. 
This is OK, however, as we want to validate that # defaults are used - not what those defaults are with mock.patch.object(packaging, 'REQUIREMENTS_FILES', ( requirements,)): result = packaging.parse_requirements() self.assertEqual(['pbr'], result) def test_override_with_env(self): """Ensure environment variable used if no files provided.""" _, tmp_file = tempfile.mkstemp(prefix='openstack', suffix='.setup') with open(tmp_file, 'w') as fh: fh.write("foo\nbar") self.useFixture( fixtures.EnvironmentVariable('PBR_REQUIREMENTS_FILES', tmp_file)) self.assertEqual(['foo', 'bar'], packaging.parse_requirements()) def test_override_with_env_multiple_files(self): _, tmp_file = tempfile.mkstemp(prefix='openstack', suffix='.setup') with open(tmp_file, 'w') as fh: fh.write("foo\nbar") self.useFixture( fixtures.EnvironmentVariable('PBR_REQUIREMENTS_FILES', "no-such-file," + tmp_file)) self.assertEqual(['foo', 'bar'], packaging.parse_requirements()) def test_index_present(self): tempdir = tempfile.mkdtemp() requirements = os.path.join(tempdir, 'requirements.txt') with open(requirements, 'w') as f: f.write('-i https://myindex.local\n') f.write(' --index-url https://myindex.local\n') f.write(' --extra-index-url https://myindex.local\n') f.write('--find-links https://myindex.local\n') f.write('arequirement>=1.0\n') result = packaging.parse_requirements([requirements]) self.assertEqual(['arequirement>=1.0'], result) def test_nested_requirements(self): tempdir = tempfile.mkdtemp() requirements = os.path.join(tempdir, 'requirements.txt') nested = os.path.join(tempdir, 'nested.txt') with open(requirements, 'w') as f: f.write('-r ' + nested) with open(nested, 'w') as f: f.write('pbr') result = packaging.parse_requirements([requirements]) self.assertEqual(['pbr'], result) class ParseRequirementsTestScenarios(base.BaseTestCase): versioned_scenarios = [ ('non-versioned', {'versioned': False, 'expected': ['bar']}), ('versioned', {'versioned': True, 'expected': ['bar>=1.2.3']}) ] subdirectory_scenarios = [ ('non-subdirectory', {'has_subdirectory': False}), ('has-subdirectory', {'has_subdirectory': True}) ] scenarios = [ ('normal', {'url': "foo\nbar", 'expected': ['foo', 'bar']}), ('normal_with_comments', { 'url': "# this is a comment\nfoo\n# and another one\nbar", 'expected': ['foo', 'bar']}), ('removes_index_lines', {'url': '-f foobar', 'expected': []}), ] scenarios = scenarios + testscenarios.multiply_scenarios([ ('ssh_egg_url', {'url': 'git+ssh://foo.com/zipball#egg=bar'}), ('git_https_egg_url', {'url': 'git+https://foo.com/zipball#egg=bar'}), ('http_egg_url', {'url': 'https://foo.com/zipball#egg=bar'}), ], versioned_scenarios, subdirectory_scenarios) scenarios = scenarios + testscenarios.multiply_scenarios( [ ('git_egg_url', {'url': 'git://foo.com/zipball#egg=bar', 'name': 'bar'}) ], [ ('non-editable', {'editable': False}), ('editable', {'editable': True}), ], versioned_scenarios, subdirectory_scenarios) def test_parse_requirements(self): tmp_file = tempfile.NamedTemporaryFile() req_string = self.url if hasattr(self, 'editable') and self.editable: req_string = ("-e %s" % req_string) if hasattr(self, 'versioned') and self.versioned: req_string = ("%s-1.2.3" % req_string) if hasattr(self, 'has_subdirectory') and self.has_subdirectory: req_string = ("%s&subdirectory=baz" % req_string) with open(tmp_file.name, 'w') as fh: fh.write(req_string) self.assertEqual(self.expected, packaging.parse_requirements([tmp_file.name])) class ParseDependencyLinksTest(base.BaseTestCase): def setUp(self): super(ParseDependencyLinksTest, 
self).setUp() _, self.tmp_file = tempfile.mkstemp(prefix="openstack", suffix=".setup") def test_parse_dependency_normal(self): with open(self.tmp_file, "w") as fh: fh.write("http://test.com\n") self.assertEqual( ["http://test.com"], packaging.parse_dependency_links([self.tmp_file])) def test_parse_dependency_with_git_egg_url(self): with open(self.tmp_file, "w") as fh: fh.write("-e git://foo.com/zipball#egg=bar") self.assertEqual( ["git://foo.com/zipball#egg=bar"], packaging.parse_dependency_links([self.tmp_file])) class TestVersions(base.BaseTestCase): scenarios = [ ('preversioned', dict(preversioned=True)), ('postversioned', dict(preversioned=False)), ] def setUp(self): super(TestVersions, self).setUp() self.repo = self.useFixture(TestRepo(self.package_dir)) self.useFixture(GPGKeyFixture()) self.useFixture(base.DiveDir(self.package_dir)) def test_email_parsing_errors_are_handled(self): mocked_open = mock.mock_open() with mock.patch('pbr.packaging.open', mocked_open): with mock.patch('email.message_from_file') as message_from_file: message_from_file.side_effect = [ email.errors.MessageError('Test'), {'Name': 'pbr_testpackage'}] version = packaging._get_version_from_pkg_metadata( 'pbr_testpackage') self.assertTrue(message_from_file.called) self.assertIsNone(version) def test_capitalized_headers(self): self.repo.commit() self.repo.tag('1.2.3') self.repo.commit('Sem-Ver: api-break') version = packaging._get_version_from_git() self.assertThat(version, matchers.StartsWith('2.0.0.dev1')) def test_capitalized_headers_partial(self): self.repo.commit() self.repo.tag('1.2.3') self.repo.commit('Sem-ver: api-break') version = packaging._get_version_from_git() self.assertThat(version, matchers.StartsWith('2.0.0.dev1')) def test_multi_inline_symbols_no_space(self): self.repo.commit() self.repo.tag('1.2.3') self.repo.commit('Sem-ver: feature,api-break') version = packaging._get_version_from_git() self.assertThat(version, matchers.StartsWith('2.0.0.dev1')) def test_multi_inline_symbols_spaced(self): self.repo.commit() self.repo.tag('1.2.3') self.repo.commit('Sem-ver: feature, api-break') version = packaging._get_version_from_git() self.assertThat(version, matchers.StartsWith('2.0.0.dev1')) def test_multi_inline_symbols_reversed(self): self.repo.commit() self.repo.tag('1.2.3') self.repo.commit('Sem-ver: api-break,feature') version = packaging._get_version_from_git() self.assertThat(version, matchers.StartsWith('2.0.0.dev1')) def test_leading_space(self): self.repo.commit() self.repo.tag('1.2.3') self.repo.commit(' sem-ver: api-break') version = packaging._get_version_from_git() self.assertThat(version, matchers.StartsWith('2.0.0.dev1')) def test_leading_space_multiline(self): self.repo.commit() self.repo.tag('1.2.3') self.repo.commit( ( ' Some cool text\n' ' sem-ver: api-break' ) ) version = packaging._get_version_from_git() self.assertThat(version, matchers.StartsWith('2.0.0.dev1')) def test_leading_characters_symbol_not_found(self): self.repo.commit() self.repo.tag('1.2.3') self.repo.commit(' ssem-ver: api-break') version = packaging._get_version_from_git() self.assertThat(version, matchers.StartsWith('1.2.4.dev1')) def test_tagged_version_has_tag_version(self): self.repo.commit() self.repo.tag('1.2.3') version = packaging._get_version_from_git('1.2.3') self.assertEqual('1.2.3', version) def test_tagged_version_with_semver_compliant_prerelease(self): self.repo.commit() self.repo.tag('1.2.3-rc2') version = packaging._get_version_from_git() self.assertEqual('1.2.3.0rc2', version) def 
test_non_canonical_tagged_version_bump(self): self.repo.commit() self.repo.tag('1.4') self.repo.commit('Sem-Ver: api-break') version = packaging._get_version_from_git() self.assertThat(version, matchers.StartsWith('2.0.0.dev1')) def test_untagged_version_has_dev_version_postversion(self): self.repo.commit() self.repo.tag('1.2.3') self.repo.commit() version = packaging._get_version_from_git() self.assertThat(version, matchers.StartsWith('1.2.4.dev1')) def test_untagged_pre_release_has_pre_dev_version_postversion(self): self.repo.commit() self.repo.tag('1.2.3.0a1') self.repo.commit() version = packaging._get_version_from_git() self.assertThat(version, matchers.StartsWith('1.2.3.0a2.dev1')) def test_untagged_version_minor_bump(self): self.repo.commit() self.repo.tag('1.2.3') self.repo.commit('sem-ver: deprecation') version = packaging._get_version_from_git() self.assertThat(version, matchers.StartsWith('1.3.0.dev1')) def test_untagged_version_major_bump(self): self.repo.commit() self.repo.tag('1.2.3') self.repo.commit('sem-ver: api-break') version = packaging._get_version_from_git() self.assertThat(version, matchers.StartsWith('2.0.0.dev1')) def test_untagged_version_has_dev_version_preversion(self): self.repo.commit() self.repo.tag('1.2.3') self.repo.commit() version = packaging._get_version_from_git('1.2.5') self.assertThat(version, matchers.StartsWith('1.2.5.dev1')) def test_untagged_version_after_pre_has_dev_version_preversion(self): self.repo.commit() self.repo.tag('1.2.3.0a1') self.repo.commit() version = packaging._get_version_from_git('1.2.5') self.assertThat(version, matchers.StartsWith('1.2.5.dev1')) def test_untagged_version_after_rc_has_dev_version_preversion(self): self.repo.commit() self.repo.tag('1.2.3.0a1') self.repo.commit() version = packaging._get_version_from_git('1.2.3') self.assertThat(version, matchers.StartsWith('1.2.3.0a2.dev1')) def test_untagged_version_after_semver_compliant_prerelease_tag(self): self.repo.commit() self.repo.tag('1.2.3-rc2') self.repo.commit() version = packaging._get_version_from_git() self.assertEqual('1.2.3.0rc3.dev1', version) def test_preversion_too_low_simple(self): # That is, the target version is either already released or not high # enough for the semver requirements given api breaks etc. self.repo.commit() self.repo.tag('1.2.3') self.repo.commit() # Note that we can't target 1.2.3 anymore - with 1.2.3 released we # need to be working on 1.2.4. err = self.assertRaises( ValueError, packaging._get_version_from_git, '1.2.3') self.assertThat(err.args[0], matchers.StartsWith('git history')) def test_preversion_too_low_semver_headers(self): # That is, the target version is either already released or not high # enough for the semver requirements given api breaks etc. self.repo.commit() self.repo.tag('1.2.3') self.repo.commit('sem-ver: feature') # Note that we can't target 1.2.4, the feature header means we need # to be working on 1.3.0 or above. 
err = self.assertRaises( ValueError, packaging._get_version_from_git, '1.2.4') self.assertThat(err.args[0], matchers.StartsWith('git history')) def test_get_kwargs_corner_cases(self): # No tags: def get_kwargs(tag): git_dir = self.repo._basedir + '/.git' return packaging._get_increment_kwargs(git_dir, tag) def _check_combinations(tag): self.repo.commit() self.assertEqual(dict(), get_kwargs(tag)) self.repo.commit('sem-ver: bugfix') self.assertEqual(dict(), get_kwargs(tag)) self.repo.commit('sem-ver: feature') self.assertEqual(dict(minor=True), get_kwargs(tag)) self.repo.uncommit() self.repo.commit('sem-ver: deprecation') self.assertEqual(dict(minor=True), get_kwargs(tag)) self.repo.uncommit() self.repo.commit('sem-ver: api-break') self.assertEqual(dict(major=True), get_kwargs(tag)) self.repo.commit('sem-ver: deprecation') self.assertEqual(dict(major=True, minor=True), get_kwargs(tag)) _check_combinations('') self.repo.tag('1.2.3') _check_combinations('1.2.3') def test_invalid_tag_ignored(self): # Fix for bug 1356784 - we treated any tag as a version, not just those # that are valid versions. self.repo.commit() self.repo.tag('1') self.repo.commit() # when the tree is tagged and its wrong: self.repo.tag('badver') version = packaging._get_version_from_git() self.assertThat(version, matchers.StartsWith('1.0.1.dev1')) # When the tree isn't tagged, we also fall through. self.repo.commit() version = packaging._get_version_from_git() self.assertThat(version, matchers.StartsWith('1.0.1.dev2')) # We don't fall through x.y versions self.repo.commit() self.repo.tag('1.2') self.repo.commit() self.repo.tag('badver2') version = packaging._get_version_from_git() self.assertThat(version, matchers.StartsWith('1.2.1.dev1')) # Or x.y.z versions self.repo.commit() self.repo.tag('1.2.3') self.repo.commit() self.repo.tag('badver3') version = packaging._get_version_from_git() self.assertThat(version, matchers.StartsWith('1.2.4.dev1')) # Or alpha/beta/pre versions self.repo.commit() self.repo.tag('1.2.4.0a1') self.repo.commit() self.repo.tag('badver4') version = packaging._get_version_from_git() self.assertThat(version, matchers.StartsWith('1.2.4.0a2.dev1')) # Non-release related tags are ignored. self.repo.commit() self.repo.tag('2') self.repo.commit() self.repo.tag('non-release-tag/2014.12.16-1') version = packaging._get_version_from_git() self.assertThat(version, matchers.StartsWith('2.0.1.dev1')) def test_valid_tag_honoured(self): # Fix for bug 1370608 - we converted any target into a 'dev version' # even if there was a distance of 0 - indicating that we were on the # tag itself. 
self.repo.commit() self.repo.tag('1.3.0.0a1') version = packaging._get_version_from_git() self.assertEqual('1.3.0.0a1', version) def test_skip_write_git_changelog(self): # Fix for bug 1467440 self.repo.commit() self.repo.tag('1.2.3') os.environ['SKIP_WRITE_GIT_CHANGELOG'] = '1' version = packaging._get_version_from_git('1.2.3') self.assertEqual('1.2.3', version) def tearDown(self): super(TestVersions, self).tearDown() os.environ.pop('SKIP_WRITE_GIT_CHANGELOG', None) class TestRequirementParsing(base.BaseTestCase): def test_requirement_parsing(self): pkgs = { 'test_reqparse': { 'requirements.txt': textwrap.dedent("""\ bar quux<1.0; python_version=='2.6' requests-aws>=0.1.4 # BSD License (3 clause) Routes>=1.12.3,!=2.0,!=2.1;python_version=='2.7' requests-kerberos>=0.6;python_version=='2.7' # MIT """), 'setup.cfg': textwrap.dedent("""\ [metadata] name = test_reqparse [extras] test = foo baz>3.2 :python_version=='2.7' # MIT bar>3.3 :python_version=='2.7' # MIT # Apache """)}, } pkg_dirs = self.useFixture(CreatePackages(pkgs)).package_dirs pkg_dir = pkg_dirs['test_reqparse'] # pkg_resources.split_sections uses None as the title of an # anonymous section instead of the empty string. Weird. expected_requirements = { None: ['bar', 'requests-aws>=0.1.4'], ":(python_version=='2.6')": ['quux<1.0'], ":(python_version=='2.7')": ['Routes!=2.0,!=2.1,>=1.12.3', 'requests-kerberos>=0.6'], 'test': ['foo'], "test:(python_version=='2.7')": ['baz>3.2', 'bar>3.3'] } venv = self.useFixture(Venv('reqParse')) bin_python = venv.python # Two things are tested by this # 1) pbr properly parses markers from requiremnts.txt and setup.cfg # 2) bdist_wheel causes pbr to not evaluate markers self._run_cmd(bin_python, ('setup.py', 'bdist_wheel'), allow_fail=False, cwd=pkg_dir) egg_info = os.path.join(pkg_dir, 'test_reqparse.egg-info') requires_txt = os.path.join(egg_info, 'requires.txt') with open(requires_txt, 'rt') as requires: generated_requirements = dict( pkg_resources.split_sections(requires)) # NOTE(dhellmann): We have to spell out the comparison because # the rendering for version specifiers in a range is not # consistent across versions of setuptools. for section, expected in expected_requirements.items(): exp_parsed = [ pkg_resources.Requirement.parse(s) for s in expected ] gen_parsed = [ pkg_resources.Requirement.parse(s) for s in generated_requirements[section] ] self.assertEqual(exp_parsed, gen_parsed) class TestPEP517Support(base.BaseTestCase): def test_pep_517_support(self): # Note that the current PBR PEP517 entrypoints rely on a valid # PBR setup.py existing. pkgs = { 'test_pep517': { 'requirements.txt': textwrap.dedent("""\ sphinx iso8601 """), # Override default setup.py to remove setup_requires. 
'setup.py': textwrap.dedent("""\ #!/usr/bin/env python import setuptools setuptools.setup(pbr=True) """), 'setup.cfg': textwrap.dedent("""\ [metadata] name = test_pep517 summary = A tiny test project author = PBR Team author-email = foo@example.com home-page = https://example.com/ classifier = Intended Audience :: Information Technology Intended Audience :: System Administrators License :: OSI Approved :: Apache Software License Operating System :: POSIX :: Linux Programming Language :: Python Programming Language :: Python :: 2 Programming Language :: Python :: 2.7 Programming Language :: Python :: 3 Programming Language :: Python :: 3.6 Programming Language :: Python :: 3.7 Programming Language :: Python :: 3.8 """), 'pyproject.toml': textwrap.dedent("""\ [build-system] requires = ["pbr", "setuptools>=36.6.0", "wheel"] build-backend = "pbr.build" """)}, } pkg_dirs = self.useFixture(CreatePackages(pkgs)).package_dirs pkg_dir = pkg_dirs['test_pep517'] venv = self.useFixture(Venv('PEP517')) # Test building sdists and wheels works. Note we do not use pip here # because pip will forcefully install the latest version of PBR on # pypi to satisfy the build-system requires. This means we can't self # test changes using pip. Build with --no-isolation appears to avoid # this problem. self._run_cmd(venv.python, ('-m', 'build', '--no-isolation', '.'), allow_fail=False, cwd=pkg_dir) class TestRepositoryURLDependencies(base.BaseTestCase): def setUp(self): super(TestRepositoryURLDependencies, self).setUp() self.requirements = os.path.join(tempfile.mkdtemp(), 'requirements.txt') with open(self.requirements, 'w') as f: f.write('\n'.join([ '-e git+git://git.pro-ject.org/oslo.messaging#egg=oslo.messaging-1.0.0-rc', # noqa '-e git+git://git.pro-ject.org/django-thumborize#egg=django-thumborize', # noqa '-e git+git://git.pro-ject.org/django-thumborize#egg=django-thumborize-beta', # noqa '-e git+git://git.pro-ject.org/django-thumborize#egg=django-thumborize2-beta', # noqa '-e git+git://git.pro-ject.org/django-thumborize#egg=django-thumborize2-beta-4.0.1', # noqa '-e git+git://git.pro-ject.org/django-thumborize#egg=django-thumborize2-beta-1.0.0-alpha.beta.1', # noqa '-e git+git://git.pro-ject.org/django-thumborize#egg=django-thumborize2-beta-1.0.0-alpha-a.b-c-somethinglong+build.1-aef.1-its-okay', # noqa '-e git+git://git.pro-ject.org/django-thumborize#egg=django-thumborize2-beta-2.0.0-rc.1+build.123', # noqa '-e git+git://git.project.org/Proj#egg=Proj1', 'git+https://git.project.org/Proj#egg=Proj2-0.0.1', '-e git+ssh://git.project.org/Proj#egg=Proj3', 'svn+svn://svn.project.org/svn/Proj#egg=Proj4-0.0.2', '-e svn+http://svn.project.org/svn/Proj/trunk@2019#egg=Proj5', 'hg+http://hg.project.org/Proj@da39a3ee5e6b#egg=Proj-0.0.3', '-e hg+http://hg.project.org/Proj@2019#egg=Proj', 'hg+http://hg.project.org/Proj@v1.0#egg=Proj-0.0.4', '-e hg+http://hg.project.org/Proj@special_feature#egg=Proj', 'git://foo.com/zipball#egg=foo-bar-1.2.4', 'pypi-proj1', 'pypi-proj2'])) def test_egg_fragment(self): expected = [ 'django-thumborize', 'django-thumborize-beta', 'django-thumborize2-beta', 'django-thumborize2-beta>=4.0.1', 'django-thumborize2-beta>=1.0.0-alpha.beta.1', 'django-thumborize2-beta>=1.0.0-alpha-a.b-c-long+build.1-aef.1-its-okay', # noqa 'django-thumborize2-beta>=2.0.0-rc.1+build.123', 'django-thumborize-beta>=0.0.4', 'django-thumborize-beta>=1.2.3', 'django-thumborize-beta>=10.20.30', 'django-thumborize-beta>=1.1.2-prerelease+meta', 'django-thumborize-beta>=1.1.2+meta', 'django-thumborize-beta>=1.1.2+meta-valid', 
'django-thumborize-beta>=1.0.0-alpha', 'django-thumborize-beta>=1.0.0-beta', 'django-thumborize-beta>=1.0.0-alpha.beta', 'django-thumborize-beta>=1.0.0-alpha.beta.1', 'django-thumborize-beta>=1.0.0-alpha.1', 'django-thumborize-beta>=1.0.0-alpha0.valid', 'django-thumborize-beta>=1.0.0-alpha.0valid', 'django-thumborize-beta>=1.0.0-alpha-a.b-c-somethinglong+build.1-aef.1-its-okay', # noqa 'django-thumborize-beta>=1.0.0-rc.1+build.1', 'django-thumborize-beta>=2.0.0-rc.1+build.123', 'django-thumborize-beta>=1.2.3-beta', 'django-thumborize-beta>=10.2.3-DEV-SNAPSHOT', 'django-thumborize-beta>=1.2.3-SNAPSHOT-123', 'django-thumborize-beta>=1.0.0', 'django-thumborize-beta>=2.0.0', 'django-thumborize-beta>=1.1.7', 'django-thumborize-beta>=2.0.0+build.1848', 'django-thumborize-beta>=2.0.1-alpha.1227', 'django-thumborize-beta>=1.0.0-alpha+beta', 'django-thumborize-beta>=1.2.3----RC-SNAPSHOT.12.9.1--.12+788', 'django-thumborize-beta>=1.2.3----R-S.12.9.1--.12+meta', 'django-thumborize-beta>=1.2.3----RC-SNAPSHOT.12.9.1--.12', 'django-thumborize-beta>=1.0.0+0.build.1-rc.10000aaa-kk-0.1', 'django-thumborize-beta>=999999999999999999.99999999999999.9999999999999', # noqa 'Proj1', 'Proj2>=0.0.1', 'Proj3', 'Proj4>=0.0.2', 'Proj5', 'Proj>=0.0.3', 'Proj', 'Proj>=0.0.4', 'Proj', 'foo-bar>=1.2.4', ] tests = [ 'egg=django-thumborize', 'egg=django-thumborize-beta', 'egg=django-thumborize2-beta', 'egg=django-thumborize2-beta-4.0.1', 'egg=django-thumborize2-beta-1.0.0-alpha.beta.1', 'egg=django-thumborize2-beta-1.0.0-alpha-a.b-c-long+build.1-aef.1-its-okay', # noqa 'egg=django-thumborize2-beta-2.0.0-rc.1+build.123', 'egg=django-thumborize-beta-0.0.4', 'egg=django-thumborize-beta-1.2.3', 'egg=django-thumborize-beta-10.20.30', 'egg=django-thumborize-beta-1.1.2-prerelease+meta', 'egg=django-thumborize-beta-1.1.2+meta', 'egg=django-thumborize-beta-1.1.2+meta-valid', 'egg=django-thumborize-beta-1.0.0-alpha', 'egg=django-thumborize-beta-1.0.0-beta', 'egg=django-thumborize-beta-1.0.0-alpha.beta', 'egg=django-thumborize-beta-1.0.0-alpha.beta.1', 'egg=django-thumborize-beta-1.0.0-alpha.1', 'egg=django-thumborize-beta-1.0.0-alpha0.valid', 'egg=django-thumborize-beta-1.0.0-alpha.0valid', 'egg=django-thumborize-beta-1.0.0-alpha-a.b-c-somethinglong+build.1-aef.1-its-okay', # noqa 'egg=django-thumborize-beta-1.0.0-rc.1+build.1', 'egg=django-thumborize-beta-2.0.0-rc.1+build.123', 'egg=django-thumborize-beta-1.2.3-beta', 'egg=django-thumborize-beta-10.2.3-DEV-SNAPSHOT', 'egg=django-thumborize-beta-1.2.3-SNAPSHOT-123', 'egg=django-thumborize-beta-1.0.0', 'egg=django-thumborize-beta-2.0.0', 'egg=django-thumborize-beta-1.1.7', 'egg=django-thumborize-beta-2.0.0+build.1848', 'egg=django-thumborize-beta-2.0.1-alpha.1227', 'egg=django-thumborize-beta-1.0.0-alpha+beta', 'egg=django-thumborize-beta-1.2.3----RC-SNAPSHOT.12.9.1--.12+788', # noqa 'egg=django-thumborize-beta-1.2.3----R-S.12.9.1--.12+meta', 'egg=django-thumborize-beta-1.2.3----RC-SNAPSHOT.12.9.1--.12', 'egg=django-thumborize-beta-1.0.0+0.build.1-rc.10000aaa-kk-0.1', # noqa 'egg=django-thumborize-beta-999999999999999999.99999999999999.9999999999999', # noqa 'egg=Proj1', 'egg=Proj2-0.0.1', 'egg=Proj3', 'egg=Proj4-0.0.2', 'egg=Proj5', 'egg=Proj-0.0.3', 'egg=Proj', 'egg=Proj-0.0.4', 'egg=Proj', 'egg=foo-bar-1.2.4', ] for index, test in enumerate(tests): self.assertEqual(expected[index], re.sub(r'egg=([^&]+).*$', packaging.egg_fragment, test)) def test_parse_repo_url_requirements(self): result = packaging.parse_requirements([self.requirements]) 
self.assertEqual(['oslo.messaging>=1.0.0-rc', 'django-thumborize', 'django-thumborize-beta', 'django-thumborize2-beta', 'django-thumborize2-beta>=4.0.1', 'django-thumborize2-beta>=1.0.0-alpha.beta.1', 'django-thumborize2-beta>=1.0.0-alpha-a.b-c-somethinglong+build.1-aef.1-its-okay', # noqa 'django-thumborize2-beta>=2.0.0-rc.1+build.123', 'Proj1', 'Proj2>=0.0.1', 'Proj3', 'Proj4>=0.0.2', 'Proj5', 'Proj>=0.0.3', 'Proj', 'Proj>=0.0.4', 'Proj', 'foo-bar>=1.2.4', 'pypi-proj1', 'pypi-proj2'], result) def test_parse_repo_url_dependency_links(self): result = packaging.parse_dependency_links([self.requirements]) self.assertEqual( [ 'git+git://git.pro-ject.org/oslo.messaging#egg=oslo.messaging-1.0.0-rc', # noqa 'git+git://git.pro-ject.org/django-thumborize#egg=django-thumborize', # noqa 'git+git://git.pro-ject.org/django-thumborize#egg=django-thumborize-beta', # noqa 'git+git://git.pro-ject.org/django-thumborize#egg=django-thumborize2-beta', # noqa 'git+git://git.pro-ject.org/django-thumborize#egg=django-thumborize2-beta-4.0.1', # noqa 'git+git://git.pro-ject.org/django-thumborize#egg=django-thumborize2-beta-1.0.0-alpha.beta.1', # noqa 'git+git://git.pro-ject.org/django-thumborize#egg=django-thumborize2-beta-1.0.0-alpha-a.b-c-somethinglong+build.1-aef.1-its-okay', # noqa 'git+git://git.pro-ject.org/django-thumborize#egg=django-thumborize2-beta-2.0.0-rc.1+build.123', # noqa 'git+git://git.project.org/Proj#egg=Proj1', 'git+https://git.project.org/Proj#egg=Proj2-0.0.1', 'git+ssh://git.project.org/Proj#egg=Proj3', 'svn+svn://svn.project.org/svn/Proj#egg=Proj4-0.0.2', 'svn+http://svn.project.org/svn/Proj/trunk@2019#egg=Proj5', 'hg+http://hg.project.org/Proj@da39a3ee5e6b#egg=Proj-0.0.3', 'hg+http://hg.project.org/Proj@2019#egg=Proj', 'hg+http://hg.project.org/Proj@v1.0#egg=Proj-0.0.4', 'hg+http://hg.project.org/Proj@special_feature#egg=Proj', 'git://foo.com/zipball#egg=foo-bar-1.2.4'], result) def get_soabi(): soabi = None try: soabi = sysconfig.get_config_var('SOABI') arch = sysconfig.get_config_var('MULTIARCH') except IOError: pass if soabi and arch and 'pypy' in sysconfig.get_scheme_names(): soabi = '%s-%s' % (soabi, arch) if soabi is None and 'pypy' in sysconfig.get_scheme_names(): # NOTE(sigmavirus24): PyPy only added support for the SOABI config var # to sysconfig in 2015. That was well after 2.2.1 was published in the # Ubuntu 14.04 archive. for suffix, _, _ in imp.get_suffixes(): if suffix.startswith('.pypy') and suffix.endswith('.so'): soabi = suffix.split('.')[1] break return soabi ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1673431436.0 pbr-5.11.1/pbr/tests/test_pbr_json.py0000664000175000017500000000230500000000000017557 0ustar00zuulzuul00000000000000# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
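# The test below exercises pbr_json.write_pbr_json, which records build-time
# git metadata in the distribution's metadata directory as a small JSON
# document, e.g. {"git_version": "123456", "is_release": true} for a short
# SHA of 123456 on a release (tagged) commit.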
try: from unittest import mock except ImportError: import mock from pbr import pbr_json from pbr.tests import base class TestJsonContent(base.BaseTestCase): @mock.patch('pbr.git._run_git_functions', return_value=True) @mock.patch('pbr.git.get_git_short_sha', return_value="123456") @mock.patch('pbr.git.get_is_release', return_value=True) def test_content(self, mock_get_is, mock_get_git, mock_run): cmd = mock.Mock() pbr_json.write_pbr_json(cmd, "basename", "pbr.json") cmd.write_file.assert_called_once_with( 'pbr', 'pbr.json', '{"git_version": "123456", "is_release": true}' ) ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1673431436.0 pbr-5.11.1/pbr/tests/test_setup.py0000664000175000017500000004402200000000000017105 0ustar00zuulzuul00000000000000# Copyright (c) 2011 OpenStack Foundation # Copyright (c) 2013 Hewlett-Packard Development Company, L.P. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from __future__ import print_function import os try: import cStringIO as io BytesIO = io.StringIO except ImportError: import io BytesIO = io.BytesIO import fixtures from pbr import git from pbr import options from pbr import packaging from pbr.tests import base class SkipFileWrites(base.BaseTestCase): scenarios = [ ('changelog_option_true', dict(option_key='skip_changelog', option_value='True', env_key='SKIP_WRITE_GIT_CHANGELOG', env_value=None, pkg_func=git.write_git_changelog, filename='ChangeLog')), ('changelog_option_false', dict(option_key='skip_changelog', option_value='False', env_key='SKIP_WRITE_GIT_CHANGELOG', env_value=None, pkg_func=git.write_git_changelog, filename='ChangeLog')), ('changelog_env_true', dict(option_key='skip_changelog', option_value='False', env_key='SKIP_WRITE_GIT_CHANGELOG', env_value='True', pkg_func=git.write_git_changelog, filename='ChangeLog')), ('changelog_both_true', dict(option_key='skip_changelog', option_value='True', env_key='SKIP_WRITE_GIT_CHANGELOG', env_value='True', pkg_func=git.write_git_changelog, filename='ChangeLog')), ('authors_option_true', dict(option_key='skip_authors', option_value='True', env_key='SKIP_GENERATE_AUTHORS', env_value=None, pkg_func=git.generate_authors, filename='AUTHORS')), ('authors_option_false', dict(option_key='skip_authors', option_value='False', env_key='SKIP_GENERATE_AUTHORS', env_value=None, pkg_func=git.generate_authors, filename='AUTHORS')), ('authors_env_true', dict(option_key='skip_authors', option_value='False', env_key='SKIP_GENERATE_AUTHORS', env_value='True', pkg_func=git.generate_authors, filename='AUTHORS')), ('authors_both_true', dict(option_key='skip_authors', option_value='True', env_key='SKIP_GENERATE_AUTHORS', env_value='True', pkg_func=git.generate_authors, filename='AUTHORS')), ] def setUp(self): super(SkipFileWrites, self).setUp() self.temp_path = self.useFixture(fixtures.TempDir()).path self.root_dir = os.path.abspath(os.path.curdir) self.git_dir = os.path.join(self.root_dir, ".git") if not os.path.exists(self.git_dir): self.skipTest("%s is missing; 
skipping git-related checks" % self.git_dir) return self.filename = os.path.join(self.temp_path, self.filename) self.option_dict = dict() if self.option_key is not None: self.option_dict[self.option_key] = ('setup.cfg', self.option_value) self.useFixture( fixtures.EnvironmentVariable(self.env_key, self.env_value)) def test_skip(self): self.pkg_func(git_dir=self.git_dir, dest_dir=self.temp_path, option_dict=self.option_dict) self.assertEqual( not os.path.exists(self.filename), (self.option_value.lower() in options.TRUE_VALUES or self.env_value is not None)) _changelog_content = """7780758\x00Break parser\x00 (tag: refs/tags/1_foo.1) 04316fe\x00Make python\x00 (refs/heads/review/monty_taylor/27519) 378261a\x00Add an integration test script.\x00 3c373ac\x00Merge "Lib\x00 (HEAD, tag: refs/tags/2013.2.rc2, tag: refs/tags/2013.2, refs/heads/mile-proposed) 182feb3\x00Fix pip invocation for old versions of pip.\x00 (tag: refs/tags/0.5.17) fa4f46e\x00Remove explicit depend on distribute.\x00 (tag: refs/tags/0.5.16) d1c53dd\x00Use pip instead of easy_install for installation.\x00 a793ea1\x00Merge "Skip git-checkout related tests when .git is missing"\x00 6c27ce7\x00Skip git-checkout related tests when .git is missing\x00 451e513\x00Bug fix: create_stack() fails when waiting\x00 4c8cfe4\x00Improve test coverage: network delete API\x00 (tag: refs/tags/(evil)) d7e6167\x00Bug fix: Fix pass thru filtering in list_networks\x00 (tag: refs/tags/ev()il) c47ec15\x00Consider 'in-use' a non-pending volume for caching\x00 (tag: refs/tags/ev)il) 8696fbd\x00Improve test coverage: private extension API\x00 (tag: refs/tags/ev(il) f0440f8\x00Improve test coverage: hypervisor list\x00 (tag: refs/tags/e(vi)l) 04984a5\x00Refactor hooks file.\x00 (HEAD, tag: 0.6.7,b, tag: refs/tags/(12), refs/heads/master) a65e8ee\x00Remove jinja pin.\x00 (tag: refs/tags/0.5.14, tag: refs/tags/0.5.13) """ # noqa def _make_old_git_changelog_format(line): """Convert post-1.8.1 git log format to pre-1.8.1 git log format""" if not line.strip(): return line sha, msg, refname = line.split('\x00') refname = refname.replace('tag: ', '') return '\x00'.join((sha, msg, refname)) _old_git_changelog_content = '\n'.join( _make_old_git_changelog_format(line) for line in _changelog_content.split('\n')) class GitLogsTest(base.BaseTestCase): scenarios = [ ('pre1.8.3', {'changelog': _old_git_changelog_content}), ('post1.8.3', {'changelog': _changelog_content}), ] def setUp(self): super(GitLogsTest, self).setUp() self.temp_path = self.useFixture(fixtures.TempDir()).path self.root_dir = os.path.abspath(os.path.curdir) self.git_dir = os.path.join(self.root_dir, ".git") self.useFixture( fixtures.EnvironmentVariable('SKIP_GENERATE_AUTHORS')) self.useFixture( fixtures.EnvironmentVariable('SKIP_WRITE_GIT_CHANGELOG')) def test_write_git_changelog(self): self.useFixture(fixtures.FakePopen(lambda _: { "stdout": BytesIO(self.changelog.encode('utf-8')) })) git.write_git_changelog(git_dir=self.git_dir, dest_dir=self.temp_path) with open(os.path.join(self.temp_path, "ChangeLog"), "r") as ch_fh: changelog_contents = ch_fh.read() self.assertIn("2013.2", changelog_contents) self.assertIn("0.5.17", changelog_contents) self.assertIn("------", changelog_contents) self.assertIn("Refactor hooks file", changelog_contents) self.assertIn( r"Bug fix: create\_stack() fails when waiting", changelog_contents) self.assertNotIn("Refactor hooks file.", changelog_contents) self.assertNotIn("182feb3", changelog_contents) self.assertNotIn("review/monty_taylor/27519", changelog_contents) 
self.assertNotIn("0.5.13", changelog_contents) self.assertNotIn("0.6.7", changelog_contents) self.assertNotIn("12", changelog_contents) self.assertNotIn("(evil)", changelog_contents) self.assertNotIn("ev()il", changelog_contents) self.assertNotIn("ev(il", changelog_contents) self.assertNotIn("ev)il", changelog_contents) self.assertNotIn("e(vi)l", changelog_contents) self.assertNotIn('Merge "', changelog_contents) self.assertNotIn(r'1\_foo.1', changelog_contents) def test_generate_authors(self): author_old = u"Foo Foo " author_new = u"Bar Bar " co_author = u"Foo Bar " co_author_by = u"Co-authored-by: " + co_author git_log_cmd = ( "git --git-dir=%s log --format=%%aN <%%aE>" % self.git_dir) git_co_log_cmd = ("git --git-dir=%s log" % self.git_dir) git_top_level = "git rev-parse --show-toplevel" cmd_map = { git_log_cmd: author_new, git_co_log_cmd: co_author_by, git_top_level: self.root_dir, } exist_files = [self.git_dir, os.path.join(self.temp_path, "AUTHORS.in")] self.useFixture(fixtures.MonkeyPatch( "os.path.exists", lambda path: os.path.abspath(path) in exist_files)) def _fake_run_shell_command(cmd, **kwargs): return cmd_map[" ".join(cmd)] self.useFixture(fixtures.MonkeyPatch( "pbr.git._run_shell_command", _fake_run_shell_command)) with open(os.path.join(self.temp_path, "AUTHORS.in"), "w") as auth_fh: auth_fh.write("%s\n" % author_old) git.generate_authors(git_dir=self.git_dir, dest_dir=self.temp_path) with open(os.path.join(self.temp_path, "AUTHORS"), "r") as auth_fh: authors = auth_fh.read() self.assertIn(author_old, authors) self.assertIn(author_new, authors) self.assertIn(co_author, authors) class _SphinxConfig(object): man_pages = ['foo'] class BaseSphinxTest(base.BaseTestCase): def setUp(self): super(BaseSphinxTest, self).setUp() # setup_command requires the Sphinx instance to have some # attributes that aren't set normally with the way we use the # class (because we replace the constructor). Add default # values directly to the class definition. 
import sphinx.application sphinx.application.Sphinx.messagelog = [] sphinx.application.Sphinx.statuscode = 0 self.useFixture(fixtures.MonkeyPatch( "sphinx.application.Sphinx.__init__", lambda *a, **kw: None)) self.useFixture(fixtures.MonkeyPatch( "sphinx.application.Sphinx.build", lambda *a, **kw: None)) self.useFixture(fixtures.MonkeyPatch( "sphinx.application.Sphinx.config", _SphinxConfig)) self.useFixture(fixtures.MonkeyPatch( "sphinx.config.Config.init_values", lambda *a: None)) self.useFixture(fixtures.MonkeyPatch( "sphinx.config.Config.__init__", lambda *a: None)) from distutils import dist self.distr = dist.Distribution() self.distr.packages = ("fake_package",) self.distr.command_options["build_sphinx"] = { "source_dir": ["a", "."]} pkg_fixture = fixtures.PythonPackage( "fake_package", [("fake_module.py", b""), ("another_fake_module_for_testing.py", b""), ("fake_private_module.py", b"")]) self.useFixture(pkg_fixture) self.useFixture(base.DiveDir(pkg_fixture.base)) self.distr.command_options["pbr"] = {} if hasattr(self, "excludes"): self.distr.command_options["pbr"]["autodoc_exclude_modules"] = ( 'setup.cfg', "fake_package.fake_private_module\n" "fake_package.another_fake_*\n" "fake_package.unknown_module") if hasattr(self, 'has_opt') and self.has_opt: options = self.distr.command_options["pbr"] options["autodoc_index_modules"] = ('setup.cfg', self.autodoc) class BuildSphinxTest(BaseSphinxTest): scenarios = [ ('true_autodoc_caps', dict(has_opt=True, autodoc='True', has_autodoc=True)), ('true_autodoc_caps_with_excludes', dict(has_opt=True, autodoc='True', has_autodoc=True, excludes="fake_package.fake_private_module\n" "fake_package.another_fake_*\n" "fake_package.unknown_module")), ('true_autodoc_lower', dict(has_opt=True, autodoc='true', has_autodoc=True)), ('false_autodoc', dict(has_opt=True, autodoc='False', has_autodoc=False)), ('no_autodoc', dict(has_opt=False, autodoc='False', has_autodoc=False)), ] def test_build_doc(self): build_doc = packaging.LocalBuildDoc(self.distr) build_doc.run() self.assertTrue( os.path.exists("api/autoindex.rst") == self.has_autodoc) self.assertTrue( os.path.exists( "api/fake_package.fake_module.rst") == self.has_autodoc) if not self.has_autodoc or hasattr(self, "excludes"): assertion = self.assertFalse else: assertion = self.assertTrue assertion( os.path.exists( "api/fake_package.fake_private_module.rst")) assertion( os.path.exists( "api/fake_package.another_fake_module_for_testing.rst")) def test_builders_config(self): build_doc = packaging.LocalBuildDoc(self.distr) build_doc.finalize_options() self.assertEqual(1, len(build_doc.builders)) self.assertIn('html', build_doc.builders) build_doc = packaging.LocalBuildDoc(self.distr) build_doc.builders = '' build_doc.finalize_options() self.assertEqual('', build_doc.builders) build_doc = packaging.LocalBuildDoc(self.distr) build_doc.builders = 'man' build_doc.finalize_options() self.assertEqual(1, len(build_doc.builders)) self.assertIn('man', build_doc.builders) build_doc = packaging.LocalBuildDoc(self.distr) build_doc.builders = 'html,man,doctest' build_doc.finalize_options() self.assertIn('html', build_doc.builders) self.assertIn('man', build_doc.builders) self.assertIn('doctest', build_doc.builders) def test_cmd_builder_override(self): if self.has_opt: self.distr.command_options["pbr"] = { "autodoc_index_modules": ('setup.cfg', self.autodoc) } self.distr.command_options["build_sphinx"]["builder"] = ( "command line", "non-existing-builder") build_doc = packaging.LocalBuildDoc(self.distr) 
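# Until finalize_options() runs, the builders list still holds the configured
# default ('html'); the command-line "builder" override is only folded in when
# the options are finalized, which is what the assertions below verify.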
self.assertNotIn('non-existing-builder', build_doc.builders) self.assertIn('html', build_doc.builders) # process command line options which should override config build_doc.finalize_options() self.assertIn('non-existing-builder', build_doc.builders) self.assertNotIn('html', build_doc.builders) def test_cmd_builder_override_multiple_builders(self): if self.has_opt: self.distr.command_options["pbr"] = { "autodoc_index_modules": ('setup.cfg', self.autodoc) } self.distr.command_options["build_sphinx"]["builder"] = ( "command line", "builder1,builder2") build_doc = packaging.LocalBuildDoc(self.distr) build_doc.finalize_options() self.assertEqual(["builder1", "builder2"], build_doc.builders) class APIAutoDocTest(base.BaseTestCase): def setUp(self): super(APIAutoDocTest, self).setUp() # setup_command requires the Sphinx instance to have some # attributes that aren't set normally with the way we use the # class (because we replace the constructor). Add default # values directly to the class definition. import sphinx.application sphinx.application.Sphinx.messagelog = [] sphinx.application.Sphinx.statuscode = 0 self.useFixture(fixtures.MonkeyPatch( "sphinx.application.Sphinx.__init__", lambda *a, **kw: None)) self.useFixture(fixtures.MonkeyPatch( "sphinx.application.Sphinx.build", lambda *a, **kw: None)) self.useFixture(fixtures.MonkeyPatch( "sphinx.application.Sphinx.config", _SphinxConfig)) self.useFixture(fixtures.MonkeyPatch( "sphinx.config.Config.init_values", lambda *a: None)) self.useFixture(fixtures.MonkeyPatch( "sphinx.config.Config.__init__", lambda *a: None)) from distutils import dist self.distr = dist.Distribution() self.distr.packages = ("fake_package",) self.distr.command_options["build_sphinx"] = { "source_dir": ["a", "."]} self.sphinx_options = self.distr.command_options["build_sphinx"] pkg_fixture = fixtures.PythonPackage( "fake_package", [("fake_module.py", b""), ("another_fake_module_for_testing.py", b""), ("fake_private_module.py", b"")]) self.useFixture(pkg_fixture) self.useFixture(base.DiveDir(pkg_fixture.base)) self.pbr_options = self.distr.command_options.setdefault('pbr', {}) self.pbr_options["autodoc_index_modules"] = ('setup.cfg', 'True') def test_default_api_build_dir(self): build_doc = packaging.LocalBuildDoc(self.distr) build_doc.run() print('PBR OPTIONS:', self.pbr_options) print('DISTR OPTIONS:', self.distr.command_options) self.assertTrue(os.path.exists("api/autoindex.rst")) self.assertTrue(os.path.exists("api/fake_package.fake_module.rst")) self.assertTrue( os.path.exists( "api/fake_package.fake_private_module.rst")) self.assertTrue( os.path.exists( "api/fake_package.another_fake_module_for_testing.rst")) def test_different_api_build_dir(self): # Options have to come out of the settings dict as a tuple # showing the source and the value. 
self.pbr_options['api_doc_dir'] = (None, 'contributor/api') build_doc = packaging.LocalBuildDoc(self.distr) build_doc.run() print('PBR OPTIONS:', self.pbr_options) print('DISTR OPTIONS:', self.distr.command_options) self.assertTrue(os.path.exists("contributor/api/autoindex.rst")) self.assertTrue( os.path.exists("contributor/api/fake_package.fake_module.rst")) self.assertTrue( os.path.exists( "contributor/api/fake_package.fake_private_module.rst")) ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1673431436.0 pbr-5.11.1/pbr/tests/test_util.py0000664000175000017500000002450300000000000016724 0ustar00zuulzuul00000000000000# -*- coding: utf-8 -*- # Copyright (c) 2015 Hewlett-Packard Development Company, L.P. (HP) # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import io import tempfile import textwrap import six from six.moves import configparser import sys from pbr.tests import base from pbr import util def config_from_ini(ini): config = {} ini = textwrap.dedent(six.u(ini)) if sys.version_info >= (3, 2): parser = configparser.ConfigParser() parser.read_file(io.StringIO(ini)) else: parser = configparser.SafeConfigParser() parser.readfp(io.StringIO(ini)) for section in parser.sections(): config[section] = dict(parser.items(section)) return config class TestBasics(base.BaseTestCase): def test_basics(self): self.maxDiff = None config_text = """ [metadata] name = foo version = 1.0 author = John Doe author_email = jd@example.com maintainer = Jim Burke maintainer_email = jb@example.com home_page = http://example.com summary = A foobar project. description = Hello, world. This is a long description. download_url = http://opendev.org/x/pbr classifier = Development Status :: 5 - Production/Stable Programming Language :: Python platform = any license = Apache 2.0 requires_dist = Sphinx requests setup_requires_dist = docutils python_requires = >=3.6 provides_dist = bax provides_extras = bar obsoletes_dist = baz [files] packages_root = src packages = foo package_data = "" = *.txt, *.rst foo = *.msg namespace_packages = hello data_files = bitmaps = bm/b1.gif bm/b2.gif config = cfg/data.cfg scripts = scripts/hello-world.py modules = mod1 """ expected = { 'name': u'foo', 'version': u'1.0', 'author': u'John Doe', 'author_email': u'jd@example.com', 'maintainer': u'Jim Burke', 'maintainer_email': u'jb@example.com', 'url': u'http://example.com', 'description': u'A foobar project.', 'long_description': u'Hello, world. 
This is a long description.', 'download_url': u'http://opendev.org/x/pbr', 'classifiers': [ u'Development Status :: 5 - Production/Stable', u'Programming Language :: Python', ], 'platforms': [u'any'], 'license': u'Apache 2.0', 'install_requires': [ u'Sphinx', u'requests', ], 'setup_requires': [u'docutils'], 'python_requires': u'>=3.6', 'provides': [u'bax'], 'provides_extras': [u'bar'], 'obsoletes': [u'baz'], 'extras_require': {}, 'package_dir': {'': u'src'}, 'packages': [u'foo'], 'package_data': { '': ['*.txt,', '*.rst'], 'foo': ['*.msg'], }, 'namespace_packages': [u'hello'], 'data_files': [ ('bitmaps', ['bm/b1.gif', 'bm/b2.gif']), ('config', ['cfg/data.cfg']), ], 'scripts': [u'scripts/hello-world.py'], 'py_modules': [u'mod1'], } config = config_from_ini(config_text) actual = util.setup_cfg_to_setup_kwargs(config) self.assertDictEqual(expected, actual) class TestExtrasRequireParsingScenarios(base.BaseTestCase): scenarios = [ ('simple_extras', { 'config_text': """ [extras] first = foo bar==1.0 second = baz>=3.2 foo """, 'expected_extra_requires': { 'first': ['foo', 'bar==1.0'], 'second': ['baz>=3.2', 'foo'], 'test': ['requests-mock'], "test:(python_version=='2.6')": ['ordereddict'], } }), ('with_markers', { 'config_text': """ [extras] test = foo:python_version=='2.6' bar baz<1.6 :python_version=='2.6' zaz :python_version>'1.0' """, 'expected_extra_requires': { "test:(python_version=='2.6')": ['foo', 'baz<1.6'], "test": ['bar', 'zaz']}}), ('no_extras', { 'config_text': """ [metadata] long_description = foo """, 'expected_extra_requires': {} })] def test_extras_parsing(self): config = config_from_ini(self.config_text) kwargs = util.setup_cfg_to_setup_kwargs(config) self.assertEqual(self.expected_extra_requires, kwargs['extras_require']) class TestInvalidMarkers(base.BaseTestCase): def test_invalid_marker_raises_error(self): config = {'extras': {'test': "foo :bad_marker>'1.0'"}} self.assertRaises(SyntaxError, util.setup_cfg_to_setup_kwargs, config) class TestMapFieldsParsingScenarios(base.BaseTestCase): scenarios = [ ('simple_project_urls', { 'config_text': """ [metadata] project_urls = Bug Tracker = https://bugs.launchpad.net/pbr/ Documentation = https://docs.openstack.org/pbr/ Source Code = https://opendev.org/openstack/pbr """, # noqa: E501 'expected_project_urls': { 'Bug Tracker': 'https://bugs.launchpad.net/pbr/', 'Documentation': 'https://docs.openstack.org/pbr/', 'Source Code': 'https://opendev.org/openstack/pbr', }, }), ('query_parameters', { 'config_text': """ [metadata] project_urls = Bug Tracker = https://bugs.launchpad.net/pbr/?query=true Documentation = https://docs.openstack.org/pbr/?foo=bar Source Code = https://git.openstack.org/cgit/openstack-dev/pbr/commit/?id=hash """, # noqa: E501 'expected_project_urls': { 'Bug Tracker': 'https://bugs.launchpad.net/pbr/?query=true', 'Documentation': 'https://docs.openstack.org/pbr/?foo=bar', 'Source Code': 'https://git.openstack.org/cgit/openstack-dev/pbr/commit/?id=hash', # noqa: E501 }, }), ] def test_project_url_parsing(self): config = config_from_ini(self.config_text) kwargs = util.setup_cfg_to_setup_kwargs(config) self.assertEqual(self.expected_project_urls, kwargs['project_urls']) class TestKeywordsParsingScenarios(base.BaseTestCase): scenarios = [ ('keywords_list', { 'config_text': """ [metadata] keywords = one two three """, # noqa: E501 'expected_keywords': ['one', 'two', 'three'], }, ), ('inline_keywords', { 'config_text': """ [metadata] keywords = one, two, three """, # noqa: E501 'expected_keywords': ['one, two, three'], }), ] 
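    # As the scenarios above show, keywords are only split on newlines: a
    # single comma-separated line is carried through as one literal string.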
def test_keywords_parsing(self): config = config_from_ini(self.config_text) kwargs = util.setup_cfg_to_setup_kwargs(config) self.assertEqual(self.expected_keywords, kwargs['keywords']) class TestProvidesExtras(base.BaseTestCase): def test_provides_extras(self): ini = """ [metadata] provides_extras = foo bar """ config = config_from_ini(ini) kwargs = util.setup_cfg_to_setup_kwargs(config) self.assertEqual(['foo', 'bar'], kwargs['provides_extras']) class TestDataFilesParsing(base.BaseTestCase): scenarios = [ ('data_files', { 'config_text': """ [files] data_files = 'i like spaces/' = 'dir with space/file with spc 2' 'dir with space/file with spc 1' """, 'data_files': [ ('i like spaces/', ['dir with space/file with spc 2', 'dir with space/file with spc 1']) ] })] def test_handling_of_whitespace_in_data_files(self): config = config_from_ini(self.config_text) kwargs = util.setup_cfg_to_setup_kwargs(config) self.assertEqual(self.data_files, kwargs['data_files']) class TestUTF8DescriptionFile(base.BaseTestCase): def test_utf8_description_file(self): _, path = tempfile.mkstemp() ini_template = """ [metadata] description_file = %s """ # Two \n's because pbr strips the file content and adds \n\n # This way we can use it directly as the assert comparison unicode_description = u'UTF8 description: é"…-ʃŋ\'\n\n' ini = ini_template % path with io.open(path, 'w', encoding='utf8') as f: f.write(unicode_description) config = config_from_ini(ini) kwargs = util.setup_cfg_to_setup_kwargs(config) self.assertEqual(unicode_description, kwargs['long_description']) ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1673431436.0 pbr-5.11.1/pbr/tests/test_version.py0000664000175000017500000003342400000000000017436 0ustar00zuulzuul00000000000000# Copyright 2012 Red Hat, Inc. # Copyright 2012-2013 Hewlett-Packard Development Company, L.P. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
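# Quick reference for the round-trips exercised below (an illustrative
# sketch, not part of the original module): SemanticVersion(1, 2, 3, 'rc', 1)
# renders as "1.2.3.0rc1" via release_string() and parses back to an equal
# object with SemanticVersion.from_pip_string("1.2.3.0rc1").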
import itertools from testtools import matchers from pbr.tests import base from pbr import version from_pip_string = version.SemanticVersion.from_pip_string class TestSemanticVersion(base.BaseTestCase): def test_ordering(self): ordered_versions = [ "1.2.3.dev6", "1.2.3.dev7", "1.2.3.a4.dev12", "1.2.3.a4.dev13", "1.2.3.a4", "1.2.3.a5.dev1", "1.2.3.a5", "1.2.3.b3.dev1", "1.2.3.b3", "1.2.3.rc2.dev1", "1.2.3.rc2", "1.2.3.rc3.dev1", "1.2.3", "1.2.4", "1.3.3", "2.2.3", ] for v in ordered_versions: sv = version.SemanticVersion.from_pip_string(v) self.expectThat(sv, matchers.Equals(sv)) for left, right in itertools.combinations(ordered_versions, 2): l_pos = ordered_versions.index(left) r_pos = ordered_versions.index(right) if l_pos < r_pos: m1 = matchers.LessThan m2 = matchers.GreaterThan else: m1 = matchers.GreaterThan m2 = matchers.LessThan left_sv = version.SemanticVersion.from_pip_string(left) right_sv = version.SemanticVersion.from_pip_string(right) self.expectThat(left_sv, m1(right_sv)) self.expectThat(right_sv, m2(left_sv)) def test_from_pip_string_legacy_alpha(self): expected = version.SemanticVersion( 1, 2, 0, prerelease_type='rc', prerelease=1) parsed = from_pip_string('1.2.0rc1') self.assertEqual(expected, parsed) def test_from_pip_string_legacy_postN(self): # When pbr trunk was incompatible with PEP-440, a stable release was # made that used postN versions to represent developer builds. As # we expect only to be parsing versions of our own, we map those # into dev builds of the next version. expected = version.SemanticVersion(1, 2, 4, dev_count=5) parsed = from_pip_string('1.2.3.post5') self.expectThat(expected, matchers.Equals(parsed)) expected = version.SemanticVersion(1, 2, 3, 'a', 5, dev_count=6) parsed = from_pip_string('1.2.3.0a4.post6') self.expectThat(expected, matchers.Equals(parsed)) # We can't define a mapping for .postN.devM, so it should raise. 
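        # (postN is already repurposed to carry the dev count, so a trailing
        # .devM component has no unambiguous interpretation here.)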
self.expectThat( lambda: from_pip_string('1.2.3.post5.dev6'), matchers.raises(ValueError)) def test_from_pip_string_v_version(self): parsed = from_pip_string('v1.2.3') expected = version.SemanticVersion(1, 2, 3) self.expectThat(expected, matchers.Equals(parsed)) expected = version.SemanticVersion(1, 2, 3, 'a', 5, dev_count=6) parsed = from_pip_string('V1.2.3.0a4.post6') self.expectThat(expected, matchers.Equals(parsed)) self.expectThat( lambda: from_pip_string('x1.2.3'), matchers.raises(ValueError)) def test_from_pip_string_legacy_nonzero_lead_in(self): # reported in bug 1361251 expected = version.SemanticVersion( 0, 0, 1, prerelease_type='a', prerelease=2) parsed = from_pip_string('0.0.1a2') self.assertEqual(expected, parsed) def test_from_pip_string_legacy_short_nonzero_lead_in(self): expected = version.SemanticVersion( 0, 1, 0, prerelease_type='a', prerelease=2) parsed = from_pip_string('0.1a2') self.assertEqual(expected, parsed) def test_from_pip_string_legacy_no_0_prerelease(self): expected = version.SemanticVersion( 2, 1, 0, prerelease_type='rc', prerelease=1) parsed = from_pip_string('2.1.0.rc1') self.assertEqual(expected, parsed) def test_from_pip_string_legacy_no_0_prerelease_2(self): expected = version.SemanticVersion( 2, 0, 0, prerelease_type='rc', prerelease=1) parsed = from_pip_string('2.0.0.rc1') self.assertEqual(expected, parsed) def test_from_pip_string_legacy_non_440_beta(self): expected = version.SemanticVersion( 2014, 2, prerelease_type='b', prerelease=2) parsed = from_pip_string('2014.2.b2') self.assertEqual(expected, parsed) def test_from_pip_string_pure_git_hash(self): self.assertRaises(ValueError, from_pip_string, '6eed5ae') def test_from_pip_string_non_digit_start(self): self.assertRaises(ValueError, from_pip_string, 'non-release-tag/2014.12.16-1') def test_final_version(self): semver = version.SemanticVersion(1, 2, 3) self.assertEqual((1, 2, 3, 'final', 0), semver.version_tuple()) self.assertEqual("1.2.3", semver.brief_string()) self.assertEqual("1.2.3", semver.debian_string()) self.assertEqual("1.2.3", semver.release_string()) self.assertEqual("1.2.3", semver.rpm_string()) self.assertEqual(semver, from_pip_string("1.2.3")) def test_parsing_short_forms(self): semver = version.SemanticVersion(1, 0, 0) self.assertEqual(semver, from_pip_string("1")) self.assertEqual(semver, from_pip_string("1.0")) self.assertEqual(semver, from_pip_string("1.0.0")) def test_dev_version(self): semver = version.SemanticVersion(1, 2, 4, dev_count=5) self.assertEqual((1, 2, 4, 'dev', 4), semver.version_tuple()) self.assertEqual("1.2.4", semver.brief_string()) self.assertEqual("1.2.4~dev5", semver.debian_string()) self.assertEqual("1.2.4.dev5", semver.release_string()) self.assertEqual("1.2.3.dev5", semver.rpm_string()) self.assertEqual(semver, from_pip_string("1.2.4.dev5")) def test_dev_no_git_version(self): semver = version.SemanticVersion(1, 2, 4, dev_count=5) self.assertEqual((1, 2, 4, 'dev', 4), semver.version_tuple()) self.assertEqual("1.2.4", semver.brief_string()) self.assertEqual("1.2.4~dev5", semver.debian_string()) self.assertEqual("1.2.4.dev5", semver.release_string()) self.assertEqual("1.2.3.dev5", semver.rpm_string()) self.assertEqual(semver, from_pip_string("1.2.4.dev5")) def test_dev_zero_version(self): semver = version.SemanticVersion(1, 2, 0, dev_count=5) self.assertEqual((1, 2, 0, 'dev', 4), semver.version_tuple()) self.assertEqual("1.2.0", semver.brief_string()) self.assertEqual("1.2.0~dev5", semver.debian_string()) self.assertEqual("1.2.0.dev5", semver.release_string()) 
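        # rpm cannot express pre-release ordering directly, so the rpm string
        # is derived from the decremented version (note the 9999 ceiling) so
        # that dev builds sort before the final 1.2.0 release.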
self.assertEqual("1.1.9999.dev5", semver.rpm_string()) self.assertEqual(semver, from_pip_string("1.2.0.dev5")) def test_alpha_dev_version(self): semver = version.SemanticVersion(1, 2, 4, 'a', 1, 12) self.assertEqual((1, 2, 4, 'alphadev', 12), semver.version_tuple()) self.assertEqual("1.2.4", semver.brief_string()) self.assertEqual("1.2.4~a1.dev12", semver.debian_string()) self.assertEqual("1.2.4.0a1.dev12", semver.release_string()) self.assertEqual("1.2.3.a1.dev12", semver.rpm_string()) self.assertEqual(semver, from_pip_string("1.2.4.0a1.dev12")) def test_alpha_version(self): semver = version.SemanticVersion(1, 2, 4, 'a', 1) self.assertEqual((1, 2, 4, 'alpha', 1), semver.version_tuple()) self.assertEqual("1.2.4", semver.brief_string()) self.assertEqual("1.2.4~a1", semver.debian_string()) self.assertEqual("1.2.4.0a1", semver.release_string()) self.assertEqual("1.2.3.a1", semver.rpm_string()) self.assertEqual(semver, from_pip_string("1.2.4.0a1")) def test_alpha_zero_version(self): semver = version.SemanticVersion(1, 2, 0, 'a', 1) self.assertEqual((1, 2, 0, 'alpha', 1), semver.version_tuple()) self.assertEqual("1.2.0", semver.brief_string()) self.assertEqual("1.2.0~a1", semver.debian_string()) self.assertEqual("1.2.0.0a1", semver.release_string()) self.assertEqual("1.1.9999.a1", semver.rpm_string()) self.assertEqual(semver, from_pip_string("1.2.0.0a1")) def test_alpha_major_zero_version(self): semver = version.SemanticVersion(1, 0, 0, 'a', 1) self.assertEqual((1, 0, 0, 'alpha', 1), semver.version_tuple()) self.assertEqual("1.0.0", semver.brief_string()) self.assertEqual("1.0.0~a1", semver.debian_string()) self.assertEqual("1.0.0.0a1", semver.release_string()) self.assertEqual("0.9999.9999.a1", semver.rpm_string()) self.assertEqual(semver, from_pip_string("1.0.0.0a1")) def test_alpha_default_version(self): semver = version.SemanticVersion(1, 2, 4, 'a') self.assertEqual((1, 2, 4, 'alpha', 0), semver.version_tuple()) self.assertEqual("1.2.4", semver.brief_string()) self.assertEqual("1.2.4~a0", semver.debian_string()) self.assertEqual("1.2.4.0a0", semver.release_string()) self.assertEqual("1.2.3.a0", semver.rpm_string()) self.assertEqual(semver, from_pip_string("1.2.4.0a0")) def test_beta_dev_version(self): semver = version.SemanticVersion(1, 2, 4, 'b', 1, 12) self.assertEqual((1, 2, 4, 'betadev', 12), semver.version_tuple()) self.assertEqual("1.2.4", semver.brief_string()) self.assertEqual("1.2.4~b1.dev12", semver.debian_string()) self.assertEqual("1.2.4.0b1.dev12", semver.release_string()) self.assertEqual("1.2.3.b1.dev12", semver.rpm_string()) self.assertEqual(semver, from_pip_string("1.2.4.0b1.dev12")) def test_beta_version(self): semver = version.SemanticVersion(1, 2, 4, 'b', 1) self.assertEqual((1, 2, 4, 'beta', 1), semver.version_tuple()) self.assertEqual("1.2.4", semver.brief_string()) self.assertEqual("1.2.4~b1", semver.debian_string()) self.assertEqual("1.2.4.0b1", semver.release_string()) self.assertEqual("1.2.3.b1", semver.rpm_string()) self.assertEqual(semver, from_pip_string("1.2.4.0b1")) def test_decrement_nonrelease(self): # The prior version of any non-release is a release semver = version.SemanticVersion(1, 2, 4, 'b', 1) self.assertEqual( version.SemanticVersion(1, 2, 3), semver.decrement()) def test_decrement_nonrelease_zero(self): # We set an arbitrary max version of 9999 when decrementing versions # - this is part of handling rpm support. 
semver = version.SemanticVersion(1, 0, 0) self.assertEqual( version.SemanticVersion(0, 9999, 9999), semver.decrement()) def test_decrement_release(self): # The next patch version of a release version requires a change to the # patch level. semver = version.SemanticVersion(2, 2, 5) self.assertEqual( version.SemanticVersion(2, 2, 4), semver.decrement()) def test_increment_nonrelease(self): # The next patch version of a non-release version is another # non-release version as the next release doesn't need to be # incremented. semver = version.SemanticVersion(1, 2, 4, 'b', 1) self.assertEqual( version.SemanticVersion(1, 2, 4, 'b', 2), semver.increment()) # Major and minor increments however need to bump things. self.assertEqual( version.SemanticVersion(1, 3, 0), semver.increment(minor=True)) self.assertEqual( version.SemanticVersion(2, 0, 0), semver.increment(major=True)) def test_increment_release(self): # The next patch version of a release version requires a change to the # patch level. semver = version.SemanticVersion(1, 2, 5) self.assertEqual( version.SemanticVersion(1, 2, 6), semver.increment()) self.assertEqual( version.SemanticVersion(1, 3, 0), semver.increment(minor=True)) self.assertEqual( version.SemanticVersion(2, 0, 0), semver.increment(major=True)) def test_rc_dev_version(self): semver = version.SemanticVersion(1, 2, 4, 'rc', 1, 12) self.assertEqual((1, 2, 4, 'candidatedev', 12), semver.version_tuple()) self.assertEqual("1.2.4", semver.brief_string()) self.assertEqual("1.2.4~rc1.dev12", semver.debian_string()) self.assertEqual("1.2.4.0rc1.dev12", semver.release_string()) self.assertEqual("1.2.3.rc1.dev12", semver.rpm_string()) self.assertEqual(semver, from_pip_string("1.2.4.0rc1.dev12")) def test_rc_version(self): semver = version.SemanticVersion(1, 2, 4, 'rc', 1) self.assertEqual((1, 2, 4, 'candidate', 1), semver.version_tuple()) self.assertEqual("1.2.4", semver.brief_string()) self.assertEqual("1.2.4~rc1", semver.debian_string()) self.assertEqual("1.2.4.0rc1", semver.release_string()) self.assertEqual("1.2.3.rc1", semver.rpm_string()) self.assertEqual(semver, from_pip_string("1.2.4.0rc1")) def test_to_dev(self): self.assertEqual( version.SemanticVersion(1, 2, 3, dev_count=1), version.SemanticVersion(1, 2, 3).to_dev(1)) self.assertEqual( version.SemanticVersion(1, 2, 3, 'rc', 1, dev_count=1), version.SemanticVersion(1, 2, 3, 'rc', 1).to_dev(1)) ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1673431436.0 pbr-5.11.1/pbr/tests/test_wsgi.py0000664000175000017500000001315500000000000016721 0ustar00zuulzuul00000000000000# Copyright (c) 2015 Hewlett-Packard Development Company, L.P. (HP) # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
import os import re import subprocess import sys try: # python 2 from urllib2 import urlopen except ImportError: # python 3 from urllib.request import urlopen from pbr.tests import base class TestWsgiScripts(base.BaseTestCase): cmd_names = ('pbr_test_wsgi', 'pbr_test_wsgi_with_class') def _get_path(self): if os.path.isdir("%s/lib64" % self.temp_dir): path = "%s/lib64" % self.temp_dir elif os.path.isdir("%s/lib" % self.temp_dir): path = "%s/lib" % self.temp_dir elif os.path.isdir("%s/site-packages" % self.temp_dir): return ".:%s/site-packages" % self.temp_dir else: raise Exception("Could not determine path for test") return ".:%s/python%s.%s/site-packages" % ( path, sys.version_info[0], sys.version_info[1]) def test_wsgi_script_install(self): """Test that we install a non-pkg-resources wsgi script.""" if os.name == 'nt': self.skipTest('Windows support is passthrough') stdout, _, return_code = self.run_setup( 'install', '--prefix=%s' % self.temp_dir) self._check_wsgi_install_content(stdout) def test_wsgi_script_run(self): """Test that we install a runnable wsgi script. This test actually attempts to start and interact with the wsgi script in question to demonstrate that it's a working wsgi script using simple server. """ if os.name == 'nt': self.skipTest('Windows support is passthrough') stdout, _, return_code = self.run_setup( 'install', '--prefix=%s' % self.temp_dir) self._check_wsgi_install_content(stdout) # Live test run the scripts and see that they respond to wsgi # requests. for cmd_name in self.cmd_names: self._test_wsgi(cmd_name, b'Hello World') def _test_wsgi(self, cmd_name, output, extra_args=None): cmd = os.path.join(self.temp_dir, 'bin', cmd_name) print("Running %s -p 0 -b 127.0.0.1" % cmd) popen_cmd = [cmd, '-p', '0', '-b', '127.0.0.1'] if extra_args: popen_cmd.extend(extra_args) env = {'PYTHONPATH': self._get_path()} p = subprocess.Popen(popen_cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=self.temp_dir, env=env) self.addCleanup(p.kill) stdoutdata = p.stdout.readline() # ****... stdoutdata = p.stdout.readline() # STARTING test server... self.assertIn( b"STARTING test server pbr_testpackage.wsgi", stdoutdata) stdoutdata = p.stdout.readline() # Available at ... print(stdoutdata) m = re.search(br'(http://[^:]+:\d+)/', stdoutdata) self.assertIsNotNone(m, "Regex failed to match on %s" % stdoutdata) stdoutdata = p.stdout.readline() # DANGER! ... self.assertIn( b"DANGER! For testing only, do not use in production", stdoutdata) stdoutdata = p.stdout.readline() # ***... f = urlopen(m.group(1).decode('utf-8')) self.assertEqual(output, f.read()) # Request again so that the application can force stderr.flush(), # otherwise the log is buffered and the next readline() will hang. 
urlopen(m.group(1).decode('utf-8')) stdoutdata = p.stderr.readline() # we should have logged an HTTP request, return code 200, that # returned the right amount of bytes status = '"GET / HTTP/1.1" 200 %d' % len(output) self.assertIn(status.encode('utf-8'), stdoutdata) def _check_wsgi_install_content(self, install_stdout): for cmd_name in self.cmd_names: install_txt = 'Installing %s script to %s' % (cmd_name, self.temp_dir) self.assertIn(install_txt, install_stdout) cmd_filename = os.path.join(self.temp_dir, 'bin', cmd_name) script_txt = open(cmd_filename, 'r').read() self.assertNotIn('pkg_resources', script_txt) main_block = """if __name__ == "__main__": import argparse import socket import sys import wsgiref.simple_server as wss""" if cmd_name == 'pbr_test_wsgi': app_name = "main" else: app_name = "WSGI.app" starting_block = ("STARTING test server pbr_testpackage.wsgi." "%s" % app_name) else_block = """else: application = None""" self.assertIn(main_block, script_txt) self.assertIn(starting_block, script_txt) self.assertIn(else_block, script_txt) def test_with_argument(self): if os.name == 'nt': self.skipTest('Windows support is passthrough') stdout, _, return_code = self.run_setup( 'install', '--prefix=%s' % self.temp_dir) self._test_wsgi('pbr_test_wsgi', b'Foo Bar', ["--", "-c", "Foo Bar"]) ././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1673431461.9019606 pbr-5.11.1/pbr/tests/testpackage/0000775000175000017500000000000000000000000016625 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1673431436.0 pbr-5.11.1/pbr/tests/testpackage/CHANGES.txt0000664000175000017500000000766400000000000020453 0ustar00zuulzuul00000000000000Changelog =========== 0.3 (unreleased) ------------------ - The ``glob_data_files`` hook became a pre-command hook for the install_data command instead of being a setup-hook. This is to support the additional functionality of requiring data_files with relative destination paths to be install relative to the package's install path (i.e. site-packages). - Dropped support for and deprecated the easier_install custom command. Although it should still work, it probably won't be used anymore for stsci_python packages. - Added support for the ``build_optional_ext`` command, which replaces/extends the default ``build_ext`` command. See the README for more details. - Added the ``tag_svn_revision`` setup_hook as a replacement for the setuptools-specific tag_svn_revision option to the egg_info command. This new hook is easier to use than the old tag_svn_revision option: It's automatically enabled by the presence of ``.dev`` in the version string, and disabled otherwise. - The ``svn_info_pre_hook`` and ``svn_info_post_hook`` have been replaced with ``version_pre_command_hook`` and ``version_post_command_hook`` respectively. However, a new ``version_setup_hook``, which has the same purpose, has been added. It is generally easier to use and will give more consistent results in that it will run every time setup.py is run, regardless of which command is used. ``stsci.distutils`` itself uses this hook--see the `setup.cfg` file and `stsci/distutils/__init__.py` for example usage. - Instead of creating an `svninfo.py` module, the new ``version_`` hooks create a file called `version.py`. In addition to the SVN info that was included in `svninfo.py`, it includes a ``__version__`` variable to be used by the package's `__init__.py`. 
This allows there to be a hard-coded ``__version__`` variable included in the source code, rather than using pkg_resources to get the version. - In `version.py`, the variables previously named ``__svn_version__`` and ``__full_svn_info__`` are now named ``__svn_revision__`` and ``__svn_full_info__``. - Fixed a bug when using stsci.distutils in the installation of other packages in the ``stsci.*`` namespace package. If stsci.distutils was not already installed, and was downloaded automatically by distribute through the setup_requires option, then ``stsci.distutils`` would fail to import. This is because the way the namespace package (nspkg) mechanism currently works, all packages belonging to the nspkg *must* be on the import path at initial import time. So when installing stsci.tools, for example, if ``stsci.tools`` is imported from within the source code at install time, but before ``stsci.distutils`` is downloaded and added to the path, the ``stsci`` package is already imported and can't be extended to include the path of ``stsci.distutils`` after the fact. The easiest way of dealing with this, it seems, is to delete ``stsci`` from ``sys.modules``, which forces it to be reimported, now the its ``__path__`` extended to include ``stsci.distutil``'s path. 0.2.2 (2011-11-09) ------------------ - Fixed check for the issue205 bug on actual setuptools installs; before it only worked on distribute. setuptools has the issue205 bug prior to version 0.6c10. - Improved the fix for the issue205 bug, especially on setuptools. setuptools, prior to 0.6c10, did not back of sys.modules either before sandboxing, which causes serious problems. In fact, it's so bad that it's not enough to add a sys.modules backup to the current sandbox: It's in fact necessary to monkeypatch setuptools.sandbox.run_setup so that any subsequent calls to it also back up sys.modules. 0.2.1 (2011-09-02) ------------------ - Fixed the dependencies so that setuptools is requirement but 'distribute' specifically. Previously installation could fail if users had plain setuptools installed and not distribute 0.2 (2011-08-23) ------------------ - Initial public release ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1673431436.0 pbr-5.11.1/pbr/tests/testpackage/LICENSE.txt0000664000175000017500000000267000000000000020455 0ustar00zuulzuul00000000000000Copyright (C) 2005 Association of Universities for Research in Astronomy (AURA) Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. 3. The name of AURA and its representatives may not be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY AURA ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL AURA BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1673431436.0 pbr-5.11.1/pbr/tests/testpackage/MANIFEST.in0000664000175000017500000000006600000000000020365 0ustar00zuulzuul00000000000000include data_files/* exclude pbr_testpackage/extra.py ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1673431436.0 pbr-5.11.1/pbr/tests/testpackage/README.txt0000664000175000017500000001502200000000000020323 0ustar00zuulzuul00000000000000Introduction ============ This package contains utilities used to package some of STScI's Python projects; specifically those projects that comprise stsci_python_ and Astrolib_. It currently consists mostly of some setup_hook scripts meant for use with `distutils2/packaging`_ and/or pbr_, and a customized easy_install command meant for use with distribute_. This package is not meant for general consumption, though it might be worth looking at for examples of how to do certain things with your own packages, but YMMV. Features ======== Hook Scripts ------------ Currently the main features of this package are a couple of setup_hook scripts. In distutils2, a setup_hook is a script that runs at the beginning of any pysetup command, and can modify the package configuration read from setup.cfg. There are also pre- and post-command hooks that only run before/after a specific setup command (eg. build_ext, install) is run. stsci.distutils.hooks.use_packages_root ''''''''''''''''''''''''''''''''''''''' If using the ``packages_root`` option under the ``[files]`` section of setup.cfg, this hook will add that path to ``sys.path`` so that modules in your package can be imported and used in setup. This can be used even if ``packages_root`` is not specified--in this case it adds ``''`` to ``sys.path``. stsci.distutils.hooks.version_setup_hook '''''''''''''''''''''''''''''''''''''''' Creates a Python module called version.py which currently contains four variables: * ``__version__`` (the release version) * ``__svn_revision__`` (the SVN revision info as returned by the ``svnversion`` command) * ``__svn_full_info__`` (as returned by the ``svn info`` command) * ``__setup_datetime__`` (the date and time that setup.py was last run). These variables can be imported in the package's `__init__.py` for degugging purposes. The version.py module will *only* be created in a package that imports from the version module in its `__init__.py`. It should be noted that this is generally preferable to writing these variables directly into `__init__.py`, since this provides more control and is less likely to unexpectedly break things in `__init__.py`. stsci.distutils.hooks.version_pre_command_hook '''''''''''''''''''''''''''''''''''''''''''''' Identical to version_setup_hook, but designed to be used as a pre-command hook. stsci.distutils.hooks.version_post_command_hook ''''''''''''''''''''''''''''''''''''''''''''''' The complement to version_pre_command_hook. 
This will delete any version.py files created during a build in order to prevent them from cluttering an SVN working copy (note, however, that version.py is *not* deleted from the build/ directory, so a copy of it is still preserved). It will also not be deleted if the current directory is not an SVN working copy. For example, if source code extracted from a source tarball it will be preserved. stsci.distutils.hooks.tag_svn_revision '''''''''''''''''''''''''''''''''''''' A setup_hook to add the SVN revision of the current working copy path to the package version string, but only if the version ends in .dev. For example, ``mypackage-1.0.dev`` becomes ``mypackage-1.0.dev1234``. This is in accordance with the version string format standardized by PEP 386. This should be used as a replacement for the ``tag_svn_revision`` option to the egg_info command. This hook is more compatible with packaging/distutils2, which does not include any VCS support. This hook is also more flexible in that it turns the revision number on/off depending on the presence of ``.dev`` in the version string, so that it's not automatically added to the version in final releases. This hook does require the ``svnversion`` command to be available in order to work. It does not examine the working copy metadata directly. stsci.distutils.hooks.numpy_extension_hook '''''''''''''''''''''''''''''''''''''''''' This is a pre-command hook for the build_ext command. To use it, add a ``[build_ext]`` section to your setup.cfg, and add to it:: pre-hook.numpy-extension-hook = stsci.distutils.hooks.numpy_extension_hook This hook must be used to build extension modules that use Numpy. The primary side-effect of this hook is to add the correct numpy include directories to `include_dirs`. To use it, add 'numpy' to the 'include-dirs' option of each extension module that requires numpy to build. The value 'numpy' will be replaced with the actual path to the numpy includes. stsci.distutils.hooks.is_display_option ''''''''''''''''''''''''''''''''''''''' This is not actually a hook, but is a useful utility function that can be used in writing other hooks. Basically, it returns ``True`` if setup.py was run with a "display option" such as --version or --help. This can be used to prevent your hook from running in such cases. stsci.distutils.hooks.glob_data_files ''''''''''''''''''''''''''''''''''''' A pre-command hook for the install_data command. Allows filename wildcards as understood by ``glob.glob()`` to be used in the data_files option. This hook must be used in order to have this functionality since it does not normally exist in distutils. This hook also ensures that data files are installed relative to the package path. data_files shouldn't normally be installed this way, but the functionality is required for a few special cases. Commands -------- build_optional_ext '''''''''''''''''' This serves as an optional replacement for the default built_ext command, which compiles C extension modules. Its purpose is to allow extension modules to be *optional*, so that if their build fails the rest of the package is still allowed to be built and installed. This can be used when an extension module is not definitely required to use the package. To use this custom command, add:: commands = stsci.distutils.command.build_optional_ext.build_optional_ext under the ``[global]`` section of your package's setup.cfg. 
Then, to mark an individual extension module as optional, under the setup.cfg section for that extension add:: optional = True Optionally, you may also add a custom failure message by adding:: fail_message = The foobar extension module failed to compile. This could be because you lack such and such headers. This package will still work, but such and such features will be disabled. .. _stsci_python: http://www.stsci.edu/resources/software_hardware/pyraf/stsci_python .. _Astrolib: http://www.scipy.org/AstroLib/ .. _distutils2/packaging: http://distutils2.notmyidea.org/ .. _d2to1: http://pypi.python.org/pypi/d2to1 .. _distribute: http://pypi.python.org/pypi/distribute ././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1673431461.9019606 pbr-5.11.1/pbr/tests/testpackage/data_files/0000775000175000017500000000000000000000000020720 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1673431436.0 pbr-5.11.1/pbr/tests/testpackage/data_files/a.txt0000664000175000017500000000000000000000000021667 0ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1673431436.0 pbr-5.11.1/pbr/tests/testpackage/data_files/b.txt0000664000175000017500000000000000000000000021670 0ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1673431436.0 pbr-5.11.1/pbr/tests/testpackage/data_files/c.rst0000664000175000017500000000000000000000000021662 0ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1673431461.8859599 pbr-5.11.1/pbr/tests/testpackage/doc/0000775000175000017500000000000000000000000017372 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1673431461.9019606 pbr-5.11.1/pbr/tests/testpackage/doc/source/0000775000175000017500000000000000000000000020672 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1673431436.0 pbr-5.11.1/pbr/tests/testpackage/doc/source/conf.py0000664000175000017500000000357000000000000022176 0ustar00zuulzuul00000000000000# -*- coding: utf-8 -*- # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. # -- General configuration ---------------------------------------------------- # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones. extensions = [ 'sphinx.ext.autodoc', ] # autodoc generation is a bit aggressive and a nuisance when doing heavy # text edit cycles. # execute "export SPHINX_DEBUG=1" in your terminal to disable # The suffix of source filenames. source_suffix = '.rst' # The master toctree document. master_doc = 'index' # General information about the project. 
project = u'testpackage' copyright = u'2013, OpenStack Foundation' # If true, '()' will be appended to :func: etc. cross-reference text. add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). add_module_names = True # The name of the Pygments (syntax highlighting) style to use. pygments_style = 'sphinx' # -- Options for HTML output -------------------------------------------------- # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, author, documentclass # [howto/manual]). latex_documents = [ ('index', '%s.tex' % project, u'%s Documentation' % project, u'OpenStack Foundation', 'manual'), ] ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1673431436.0 pbr-5.11.1/pbr/tests/testpackage/doc/source/index.rst0000664000175000017500000000073700000000000022542 0ustar00zuulzuul00000000000000.. testpackage documentation master file, created by sphinx-quickstart on Tue Jul 9 22:26:36 2013. You can adapt this file completely to your liking, but it should at least contain the root `toctree` directive. Welcome to testpackage's documentation! ======================================================== Contents: .. toctree:: :maxdepth: 2 installation usage Indices and tables ================== * :ref:`genindex` * :ref:`modindex` * :ref:`search` ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1673431436.0 pbr-5.11.1/pbr/tests/testpackage/doc/source/installation.rst0000664000175000017500000000031200000000000024121 0ustar00zuulzuul00000000000000============ Installation ============ At the command line:: $ pip install testpackage Or, if you have virtualenvwrapper installed:: $ mkvirtualenv testpackage $ pip install testpackage ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1673431436.0 pbr-5.11.1/pbr/tests/testpackage/doc/source/usage.rst0000664000175000017500000000012300000000000022524 0ustar00zuulzuul00000000000000======== Usage ======== To use testpackage in a project:: import testpackage ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1673431436.0 pbr-5.11.1/pbr/tests/testpackage/extra-file.txt0000664000175000017500000000000000000000000021414 0ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1673431436.0 pbr-5.11.1/pbr/tests/testpackage/git-extra-file.txt0000664000175000017500000000000000000000000022175 0ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1673431461.9059608 pbr-5.11.1/pbr/tests/testpackage/pbr_testpackage/0000775000175000017500000000000000000000000021763 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1673431436.0 pbr-5.11.1/pbr/tests/testpackage/pbr_testpackage/__init__.py0000664000175000017500000000013600000000000024074 0ustar00zuulzuul00000000000000import pbr.version __version__ = pbr.version.VersionInfo('pbr_testpackage').version_string() ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1673431436.0 pbr-5.11.1/pbr/tests/testpackage/pbr_testpackage/_setup_hooks.py0000664000175000017500000000440600000000000025043 0ustar00zuulzuul00000000000000# Copyright (c) 2013 Hewlett-Packard Development Company, 
L.P. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. # # Copyright (C) 2013 Association of Universities for Research in Astronomy # (AURA) # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above # copyright notice, this list of conditions and the following # disclaimer in the documentation and/or other materials provided # with the distribution. # # 3. The name of AURA and its representatives may not be used to # endorse or promote products derived from this software without # specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY AURA ``AS IS'' AND ANY EXPRESS OR IMPLIED # WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF # MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE # DISCLAIMED. IN NO EVENT SHALL AURA BE LIABLE FOR ANY DIRECT, INDIRECT, # INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, # BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS from distutils.command import build_py def test_hook_1(config): print('test_hook_1') def test_hook_2(config): print('test_hook_2') class test_command(build_py.build_py): command_name = 'build_py' def run(self): print('Running custom build_py command.') return build_py.build_py.run(self) def test_pre_hook(cmdobj): print('build_ext pre-hook') def test_post_hook(cmdobj): print('build_ext post-hook') ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1673431436.0 pbr-5.11.1/pbr/tests/testpackage/pbr_testpackage/cmd.py0000664000175000017500000000143600000000000023104 0ustar00zuulzuul00000000000000# Copyright (c) 2013 Hewlett-Packard Development Company, L.P. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. 
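# Both callables below are wired up as console_scripts entry points in this
# test package's setup.cfg (pbr_test_cmd and pbr_test_cmd_with_class).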
from __future__ import print_function def main(): print("PBR Test Command") class Foo(object): @classmethod def bar(self): print("PBR Test Command - with class!") ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1673431436.0 pbr-5.11.1/pbr/tests/testpackage/pbr_testpackage/extra.py0000664000175000017500000000000000000000000023446 0ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1673431461.9059608 pbr-5.11.1/pbr/tests/testpackage/pbr_testpackage/package_data/0000775000175000017500000000000000000000000024347 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1673431436.0 pbr-5.11.1/pbr/tests/testpackage/pbr_testpackage/package_data/1.txt0000664000175000017500000000000000000000000025236 0ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1673431436.0 pbr-5.11.1/pbr/tests/testpackage/pbr_testpackage/package_data/2.txt0000664000175000017500000000000000000000000025237 0ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1673431436.0 pbr-5.11.1/pbr/tests/testpackage/pbr_testpackage/wsgi.py0000664000175000017500000000245100000000000023310 0ustar00zuulzuul00000000000000# Copyright (c) 2013 Hewlett-Packard Development Company, L.P. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. from __future__ import print_function import argparse import functools import sys def application(env, start_response, data): sys.stderr.flush() # Force the previous request log to be written. start_response('200 OK', [('Content-Type', 'text/html')]) return [data.encode('utf-8')] def main(): parser = argparse.ArgumentParser(description='Return a string.') parser.add_argument('--content', '-c', help='String returned', default='Hello World') args = parser.parse_args() return functools.partial(application, data=args.content) class WSGI(object): @classmethod def app(self): return functools.partial(application, data='Hello World') ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1673431436.0 pbr-5.11.1/pbr/tests/testpackage/setup.cfg0000664000175000017500000000327100000000000020451 0ustar00zuulzuul00000000000000[metadata] name = pbr_testpackage # TODO(lifeless) we should inject this as needed otherwise we're not truely # testing postversioned codepaths. 
version = 0.1.dev author = OpenStack author-email = openstack-discuss@lists.openstack.org home-page = http://pypi.python.org/pypi/pbr project_urls = Bug Tracker = https://bugs.launchpad.net/pbr/ Documentation = https://docs.openstack.org/pbr/ Source Code = https://opendev.org/openstack/pbr summary = Test package for testing pbr description-file = README.txt CHANGES.txt description-content-type = text/plain; charset=UTF-8 python-requires = >=2.5 requires-dist = setuptools classifier = Development Status :: 3 - Alpha Intended Audience :: Developers License :: OSI Approved :: BSD License Programming Language :: Python Topic :: Scientific/Engineering Topic :: Software Development :: Build Tools Topic :: Software Development :: Libraries :: Python Modules Topic :: System :: Archiving :: Packaging keywords = packaging, distutils, setuptools [files] packages = pbr_testpackage package-data = testpackage = package_data/*.txt data-files = testpackage/data_files = data_files/* extra-files = extra-file.txt [entry_points] console_scripts = pbr_test_cmd = pbr_testpackage.cmd:main pbr_test_cmd_with_class = pbr_testpackage.cmd:Foo.bar wsgi_scripts = pbr_test_wsgi = pbr_testpackage.wsgi:main pbr_test_wsgi_with_class = pbr_testpackage.wsgi:WSGI.app [extension=pbr_testpackage.testext] sources = src/testext.c optional = True [global] #setup-hooks = # pbr_testpackage._setup_hooks.test_hook_1 # pbr_testpackage._setup_hooks.test_hook_2 commands = pbr_testpackage._setup_hooks.test_command ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1673431436.0 pbr-5.11.1/pbr/tests/testpackage/setup.py0000664000175000017500000000126400000000000020342 0ustar00zuulzuul00000000000000# Copyright (c) 2013 Hewlett-Packard Development Company, L.P. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. 
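# Minimal pbr-style setup.py: all metadata lives in setup.cfg above, so the
# only job here is to pull in pbr via setup_requires and enable it.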
import setuptools setuptools.setup( setup_requires=['pbr'], pbr=True, ) ././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1673431461.9059608 pbr-5.11.1/pbr/tests/testpackage/src/0000775000175000017500000000000000000000000017414 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1673431436.0 pbr-5.11.1/pbr/tests/testpackage/src/testext.c0000664000175000017500000000124100000000000021256 0ustar00zuulzuul00000000000000#include static PyMethodDef TestextMethods[] = { {NULL, NULL, 0, NULL} }; #if PY_MAJOR_VERSION >=3 static struct PyModuleDef testextmodule = { PyModuleDef_HEAD_INIT, /* This should correspond to a PyModuleDef_Base type */ "testext", /* This is the module name */ "Test extension module", /* This is the module docstring */ -1, /* This defines the size of the module and says everything is global */ TestextMethods /* This is the method definition */ }; PyObject* PyInit_testext(void) { return PyModule_Create(&testextmodule); } #else PyMODINIT_FUNC inittestext(void) { Py_InitModule("testext", TestextMethods); } #endif ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1673431436.0 pbr-5.11.1/pbr/tests/testpackage/test-requirements.txt0000664000175000017500000000006000000000000023062 0ustar00zuulzuul00000000000000ordereddict;python_version=='2.6' requests-mock ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1673431436.0 pbr-5.11.1/pbr/tests/util.py0000664000175000017500000000514600000000000015667 0ustar00zuulzuul00000000000000# Copyright (c) 2013 Hewlett-Packard Development Company, L.P. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. # # Copyright (C) 2013 Association of Universities for Research in Astronomy # (AURA) # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above # copyright notice, this list of conditions and the following # disclaimer in the documentation and/or other materials provided # with the distribution. # # 3. The name of AURA and its representatives may not be used to # endorse or promote products derived from this software without # specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY AURA ``AS IS'' AND ANY EXPRESS OR IMPLIED # WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF # MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE # DISCLAIMED. 
IN NO EVENT SHALL AURA BE LIABLE FOR ANY DIRECT, INDIRECT, # INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, # BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS import contextlib import os import shutil import stat import sys try: import ConfigParser as configparser except ImportError: import configparser @contextlib.contextmanager def open_config(filename): if sys.version_info >= (3, 2): cfg = configparser.ConfigParser() else: cfg = configparser.SafeConfigParser() cfg.read(filename) yield cfg with open(filename, 'w') as fp: cfg.write(fp) def rmtree(path): """shutil.rmtree() with error handler. Handle 'access denied' from trying to delete read-only files. """ def onerror(func, path, exc_info): if not os.access(path, os.W_OK): os.chmod(path, stat.S_IWUSR) func(path) else: raise return shutil.rmtree(path, onerror=onerror) ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1673431436.0 pbr-5.11.1/pbr/util.py0000664000175000017500000005601000000000000014521 0ustar00zuulzuul00000000000000# Copyright (c) 2013 Hewlett-Packard Development Company, L.P. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. # # Copyright (C) 2013 Association of Universities for Research in Astronomy # (AURA) # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above # copyright notice, this list of conditions and the following # disclaimer in the documentation and/or other materials provided # with the distribution. # # 3. The name of AURA and its representatives may not be used to # endorse or promote products derived from this software without # specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY AURA ``AS IS'' AND ANY EXPRESS OR IMPLIED # WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF # MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE # DISCLAIMED. IN NO EVENT SHALL AURA BE LIABLE FOR ANY DIRECT, INDIRECT, # INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, # BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS # OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND # ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR # TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE # USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH # DAMAGE. """The code in this module is mostly copy/pasted out of the distutils2 source code, as recommended by Tarek Ziade. As such, it may be subject to some change as distutils2 development continues, and will have to be kept up to date. 
I didn't want to use it directly from distutils2 itself, since I do not want it to be an installation dependency for our packages yet--it is still too unstable (the latest version on PyPI doesn't even install). """ # These first two imports are not used, but are needed to get around an # irritating Python bug that can crop up when using ./setup.py test. # See: http://www.eby-sarna.com/pipermail/peak/2010-May/003355.html try: import multiprocessing # noqa except ImportError: pass import logging # noqa from collections import defaultdict import io import os import re import shlex import sys import traceback import distutils.ccompiler from distutils import errors from distutils import log import pkg_resources from setuptools import dist as st_dist from setuptools import extension try: import ConfigParser as configparser except ImportError: import configparser from pbr import extra_files import pbr.hooks # A simplified RE for this; just checks that the line ends with version # predicates in () _VERSION_SPEC_RE = re.compile(r'\s*(.*?)\s*\((.*)\)\s*$') # Mappings from setup.cfg options, in (section, option) form, to setup() # keyword arguments CFG_TO_PY_SETUP_ARGS = ( (('metadata', 'name'), 'name'), (('metadata', 'version'), 'version'), (('metadata', 'author'), 'author'), (('metadata', 'author_email'), 'author_email'), (('metadata', 'maintainer'), 'maintainer'), (('metadata', 'maintainer_email'), 'maintainer_email'), (('metadata', 'home_page'), 'url'), (('metadata', 'project_urls'), 'project_urls'), (('metadata', 'summary'), 'description'), (('metadata', 'keywords'), 'keywords'), (('metadata', 'description'), 'long_description'), ( ('metadata', 'description_content_type'), 'long_description_content_type', ), (('metadata', 'download_url'), 'download_url'), (('metadata', 'classifier'), 'classifiers'), (('metadata', 'platform'), 'platforms'), # ** (('metadata', 'license'), 'license'), # Use setuptools install_requires, not # broken distutils requires (('metadata', 'requires_dist'), 'install_requires'), (('metadata', 'setup_requires_dist'), 'setup_requires'), (('metadata', 'python_requires'), 'python_requires'), (('metadata', 'requires_python'), 'python_requires'), (('metadata', 'provides_dist'), 'provides'), # ** (('metadata', 'provides_extras'), 'provides_extras'), (('metadata', 'obsoletes_dist'), 'obsoletes'), # ** (('files', 'packages_root'), 'package_dir'), (('files', 'packages'), 'packages'), (('files', 'package_data'), 'package_data'), (('files', 'namespace_packages'), 'namespace_packages'), (('files', 'data_files'), 'data_files'), (('files', 'scripts'), 'scripts'), (('files', 'modules'), 'py_modules'), # ** (('global', 'commands'), 'cmdclass'), # Not supported in distutils2, but provided for # backwards compatibility with setuptools (('backwards_compat', 'zip_safe'), 'zip_safe'), (('backwards_compat', 'tests_require'), 'tests_require'), (('backwards_compat', 'dependency_links'), 'dependency_links'), (('backwards_compat', 'include_package_data'), 'include_package_data'), ) # setup() arguments that can have multiple values in setup.cfg MULTI_FIELDS = ("classifiers", "platforms", "install_requires", "provides", "obsoletes", "namespace_packages", "packages", "package_data", "data_files", "scripts", "py_modules", "dependency_links", "setup_requires", "tests_require", "keywords", "cmdclass", "provides_extras") # setup() arguments that can have mapping values in setup.cfg MAP_FIELDS = ("project_urls",) # setup() arguments that contain boolean values BOOL_FIELDS = ("zip_safe", 
"include_package_data") CSV_FIELDS = () def shlex_split(path): if os.name == 'nt': # shlex cannot handle paths that contain backslashes, treating those # as escape characters. path = path.replace("\\", "/") return [x.replace("/", "\\") for x in shlex.split(path)] return shlex.split(path) def resolve_name(name): """Resolve a name like ``module.object`` to an object and return it. Raise ImportError if the module or name is not found. """ parts = name.split('.') cursor = len(parts) - 1 module_name = parts[:cursor] attr_name = parts[-1] while cursor > 0: try: ret = __import__('.'.join(module_name), fromlist=[attr_name]) break except ImportError: if cursor == 0: raise cursor -= 1 module_name = parts[:cursor] attr_name = parts[cursor] ret = '' for part in parts[cursor:]: try: ret = getattr(ret, part) except AttributeError: raise ImportError(name) return ret def cfg_to_args(path='setup.cfg', script_args=()): """Distutils2 to distutils1 compatibility util. This method uses an existing setup.cfg to generate a dictionary of keywords that can be used by distutils.core.setup(kwargs**). :param path: The setup.cfg path. :param script_args: List of commands setup.py was called with. :raises DistutilsFileError: When the setup.cfg file is not found. """ # The method source code really starts here. if sys.version_info >= (3, 0): parser = configparser.ConfigParser() else: parser = configparser.SafeConfigParser() if not os.path.exists(path): raise errors.DistutilsFileError("file '%s' does not exist" % os.path.abspath(path)) try: parser.read(path, encoding='utf-8') except TypeError: # Python 2 doesn't accept the encoding kwarg parser.read(path) config = {} for section in parser.sections(): config[section] = dict() for k, value in parser.items(section): config[section][k.replace('-', '_')] = value # Run setup_hooks, if configured setup_hooks = has_get_option(config, 'global', 'setup_hooks') package_dir = has_get_option(config, 'files', 'packages_root') # Add the source package directory to sys.path in case it contains # additional hooks, and to make sure it's on the path before any existing # installations of the package if package_dir: package_dir = os.path.abspath(package_dir) sys.path.insert(0, package_dir) try: if setup_hooks: setup_hooks = [ hook for hook in split_multiline(setup_hooks) if hook != 'pbr.hooks.setup_hook'] for hook in setup_hooks: hook_fn = resolve_name(hook) try: hook_fn(config) except SystemExit: log.error('setup hook %s terminated the installation') except Exception: e = sys.exc_info()[1] log.error('setup hook %s raised exception: %s\n' % (hook, e)) log.error(traceback.format_exc()) sys.exit(1) # Run the pbr hook pbr.hooks.setup_hook(config) kwargs = setup_cfg_to_setup_kwargs(config, script_args) # Set default config overrides kwargs['include_package_data'] = True kwargs['zip_safe'] = False register_custom_compilers(config) ext_modules = get_extension_modules(config) if ext_modules: kwargs['ext_modules'] = ext_modules entry_points = get_entry_points(config) if entry_points: kwargs['entry_points'] = entry_points # Handle the [files]/extra_files option files_extra_files = has_get_option(config, 'files', 'extra_files') if files_extra_files: extra_files.set_extra_files(split_multiline(files_extra_files)) finally: # Perform cleanup if any paths were added to sys.path if package_dir: sys.path.pop(0) return kwargs def setup_cfg_to_setup_kwargs(config, script_args=()): """Convert config options to kwargs. 
Processes the setup.cfg options and converts them to arguments accepted by setuptools' setup() function. """ kwargs = {} # Temporarily holds install_requires and extra_requires while we # parse env_markers. all_requirements = {} for alias, arg in CFG_TO_PY_SETUP_ARGS: section, option = alias in_cfg_value = has_get_option(config, section, option) if not in_cfg_value and arg == "long_description": in_cfg_value = has_get_option(config, section, "description_file") if in_cfg_value: in_cfg_value = split_multiline(in_cfg_value) value = '' for filename in in_cfg_value: description_file = io.open(filename, encoding='utf-8') try: value += description_file.read().strip() + '\n\n' finally: description_file.close() in_cfg_value = value if not in_cfg_value: continue if arg in CSV_FIELDS: in_cfg_value = split_csv(in_cfg_value) if arg in MULTI_FIELDS: in_cfg_value = split_multiline(in_cfg_value) elif arg in MAP_FIELDS: in_cfg_map = {} for i in split_multiline(in_cfg_value): k, v = i.split('=', 1) in_cfg_map[k.strip()] = v.strip() in_cfg_value = in_cfg_map elif arg in BOOL_FIELDS: # Provide some flexibility here... if in_cfg_value.lower() in ('true', 't', '1', 'yes', 'y'): in_cfg_value = True else: in_cfg_value = False if in_cfg_value: if arg in ('install_requires', 'tests_require'): # Replaces PEP345-style version specs with the sort expected by # setuptools in_cfg_value = [_VERSION_SPEC_RE.sub(r'\1\2', pred) for pred in in_cfg_value] if arg == 'install_requires': # Split install_requires into package,env_marker tuples # These will be re-assembled later install_requires = [] requirement_pattern = ( r'(?P[^;]*);?(?P[^#]*?)(?:\s*#.*)?$') for requirement in in_cfg_value: m = re.match(requirement_pattern, requirement) requirement_package = m.group('package').strip() env_marker = m.group('env_marker').strip() install_requires.append((requirement_package, env_marker)) all_requirements[''] = install_requires elif arg == 'package_dir': in_cfg_value = {'': in_cfg_value} elif arg in ('package_data', 'data_files'): data_files = {} firstline = True prev = None for line in in_cfg_value: if '=' in line: key, value = line.split('=', 1) key_unquoted = shlex_split(key.strip())[0] key, value = (key_unquoted, value.strip()) if key in data_files: # Multiple duplicates of the same package name; # this is for backwards compatibility of the old # format prior to d2to1 0.2.6. prev = data_files[key] prev.extend(shlex_split(value)) else: prev = data_files[key.strip()] = shlex_split(value) elif firstline: raise errors.DistutilsOptionError( 'malformed package_data first line %r (misses ' '"=")' % line) else: prev.extend(shlex_split(line.strip())) firstline = False if arg == 'data_files': # the data_files value is a pointlessly different structure # from the package_data value data_files = sorted(data_files.items()) in_cfg_value = data_files elif arg == 'cmdclass': cmdclass = {} dist = st_dist.Distribution() for cls_name in in_cfg_value: cls = resolve_name(cls_name) cmd = cls(dist) cmdclass[cmd.get_command_name()] = cls in_cfg_value = cmdclass kwargs[arg] = in_cfg_value # Transform requirements with embedded environment markers to # setuptools' supported marker-per-requirement format. 
# # install_requires are treated as a special case of extras, before # being put back in the expected place # # fred = # foo:marker # bar # -> {'fred': ['bar'], 'fred:marker':['foo']} if 'extras' in config: requirement_pattern = ( r'(?P[^:]*):?(?P[^#]*?)(?:\s*#.*)?$') extras = config['extras'] # Add contents of test-requirements, if any, into an extra named # 'test' if one does not already exist. if 'test' not in extras: from pbr import packaging extras['test'] = "\n".join(packaging.parse_requirements( packaging.TEST_REQUIREMENTS_FILES)).replace(';', ':') for extra in extras: extra_requirements = [] requirements = split_multiline(extras[extra]) for requirement in requirements: m = re.match(requirement_pattern, requirement) extras_value = m.group('package').strip() env_marker = m.group('env_marker') extra_requirements.append((extras_value, env_marker)) all_requirements[extra] = extra_requirements # Transform the full list of requirements into: # - install_requires, for those that have no extra and no # env_marker # - named extras, for those with an extra name (which may include # an env_marker) # - and as a special case, install_requires with an env_marker are # treated as named extras where the name is the empty string extras_require = {} for req_group in all_requirements: for requirement, env_marker in all_requirements[req_group]: if env_marker: extras_key = '%s:(%s)' % (req_group, env_marker) # We do not want to poison wheel creation with locally # evaluated markers. sdists always re-create the egg_info # and as such do not need guarded, and pip will never call # multiple setup.py commands at once. if 'bdist_wheel' not in script_args: try: if pkg_resources.evaluate_marker('(%s)' % env_marker): extras_key = req_group except SyntaxError: log.error( "Marker evaluation failed, see the following " "error. For more information see: " "http://docs.openstack.org/" "pbr/latest/user/using.html#environment-markers" ) raise else: extras_key = req_group extras_require.setdefault(extras_key, []).append(requirement) kwargs['install_requires'] = extras_require.pop('', []) kwargs['extras_require'] = extras_require return kwargs def register_custom_compilers(config): """Handle custom compilers. This has no real equivalent in distutils, where additional compilers could only be added programmatically, so we have to hack it in somehow. """ compilers = has_get_option(config, 'global', 'compilers') if compilers: compilers = split_multiline(compilers) for compiler in compilers: compiler = resolve_name(compiler) # In distutils2 compilers these class attributes exist; for # distutils1 we just have to make something up if hasattr(compiler, 'name'): name = compiler.name else: name = compiler.__name__ if hasattr(compiler, 'description'): desc = compiler.description else: desc = 'custom compiler %s' % name module_name = compiler.__module__ # Note; this *will* override built in compilers with the same name # TODO(embray): Maybe display a warning about this? cc = distutils.ccompiler.compiler_class cc[name] = (module_name, compiler.__name__, desc) # HACK!!!! Distutils assumes all compiler modules are in the # distutils package sys.modules['distutils.' 
+ module_name] = sys.modules[module_name] def get_extension_modules(config): """Handle extension modules""" EXTENSION_FIELDS = ("sources", "include_dirs", "define_macros", "undef_macros", "library_dirs", "libraries", "runtime_library_dirs", "extra_objects", "extra_compile_args", "extra_link_args", "export_symbols", "swig_opts", "depends") ext_modules = [] for section in config: if ':' in section: labels = section.split(':', 1) else: # Backwards compatibility for old syntax; don't use this though labels = section.split('=', 1) labels = [label.strip() for label in labels] if (len(labels) == 2) and (labels[0] == 'extension'): ext_args = {} for field in EXTENSION_FIELDS: value = has_get_option(config, section, field) # All extension module options besides name can have multiple # values if not value: continue value = split_multiline(value) if field == 'define_macros': macros = [] for macro in value: macro = macro.split('=', 1) if len(macro) == 1: macro = (macro[0].strip(), None) else: macro = (macro[0].strip(), macro[1].strip()) macros.append(macro) value = macros ext_args[field] = value if ext_args: if 'name' not in ext_args: ext_args['name'] = labels[1] ext_modules.append(extension.Extension(ext_args.pop('name'), **ext_args)) return ext_modules def get_entry_points(config): """Process the [entry_points] section of setup.cfg. Processes setup.cfg to handle setuptools entry points. This is, of course, not a standard feature of distutils2/packaging, but as there is not currently a standard alternative in packaging, we provide support for them. """ if 'entry_points' not in config: return {} return dict((option, split_multiline(value)) for option, value in config['entry_points'].items()) def has_get_option(config, section, option): if section in config and option in config[section]: return config[section][option] else: return False def split_multiline(value): """Special behaviour when we have a multi line options""" value = [element for element in (line.strip() for line in value.split('\n')) if element and not element.startswith('#')] return value def split_csv(value): """Special behaviour when we have a comma separated options""" value = [element for element in (chunk.strip() for chunk in value.split(',')) if element] return value # The following classes are used to hack Distribution.command_options a bit class DefaultGetDict(defaultdict): """Like defaultdict, but get() also sets and returns the default value.""" def get(self, key, default=None): if default is None: default = self.default_factory() return super(DefaultGetDict, self).setdefault(key, default) ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1673431436.0 pbr-5.11.1/pbr/version.py0000664000175000017500000005007600000000000015237 0ustar00zuulzuul00000000000000 # Copyright 2012 OpenStack Foundation # Copyright 2012-2013 Hewlett-Packard Development Company, L.P. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """ Utilities for consuming the version from importlib-metadata. 
""" import itertools import operator import sys # TODO(stephenfin): Remove this once we drop support for Python < 3.8 if sys.version_info >= (3, 8): from importlib import metadata as importlib_metadata use_importlib = True else: try: import importlib_metadata use_importlib = True except ImportError: use_importlib = False def _is_int(string): try: int(string) return True except ValueError: return False class SemanticVersion(object): """A pure semantic version independent of serialisation. See the pbr doc 'semver' for details on the semantics. """ def __init__( self, major, minor=0, patch=0, prerelease_type=None, prerelease=None, dev_count=None): """Create a SemanticVersion. :param major: Major component of the version. :param minor: Minor component of the version. Defaults to 0. :param patch: Patch level component. Defaults to 0. :param prerelease_type: What sort of prerelease version this is - one of a(alpha), b(beta) or rc(release candidate). :param prerelease: For prerelease versions, what number prerelease. Defaults to 0. :param dev_count: How many commits since the last release. """ self._major = major self._minor = minor self._patch = patch self._prerelease_type = prerelease_type self._prerelease = prerelease if self._prerelease_type and not self._prerelease: self._prerelease = 0 self._dev_count = dev_count or 0 # Normalise 0 to None. def __eq__(self, other): if not isinstance(other, SemanticVersion): return False return self.__dict__ == other.__dict__ def __hash__(self): return sum(map(hash, self.__dict__.values())) def _sort_key(self): """Return a key for sorting SemanticVersion's on.""" # key things: # - final is after rc's, so we make that a/b/rc/z # - dev==None is after all other devs, so we use sys.maxsize there. # - unqualified dev releases come before any pre-releases. # So we do: # (major, minor, patch) - gets the major grouping. # (0|1) unqualified dev flag # (a/b/rc/z) - release segment grouping # pre-release level # dev count, maxsize for releases. rc_lookup = {'a': 'a', 'b': 'b', 'rc': 'rc', None: 'z'} if self._dev_count and not self._prerelease_type: uq_dev = 0 else: uq_dev = 1 return ( self._major, self._minor, self._patch, uq_dev, rc_lookup[self._prerelease_type], self._prerelease, self._dev_count or sys.maxsize) def __lt__(self, other): """Compare self and other, another Semantic Version.""" # NB(lifeless) this could perhaps be rewritten as # lt (tuple_of_one, tuple_of_other) with a single check for # the typeerror corner cases - that would likely be faster # if this ever becomes performance sensitive. if not isinstance(other, SemanticVersion): raise TypeError("ordering to non-SemanticVersion is undefined") return self._sort_key() < other._sort_key() def __le__(self, other): return self == other or self < other def __ge__(self, other): return not self < other def __gt__(self, other): return not self <= other def __ne__(self, other): return not self == other def __repr__(self): return "pbr.version.SemanticVersion(%s)" % self.release_string() @classmethod def from_pip_string(klass, version_string): """Create a SemanticVersion from a pip version string. This method will parse a version like 1.3.0 into a SemanticVersion. This method is responsible for accepting any version string that any older version of pbr ever created. Therefore: versions like 1.3.0a1 versions are handled, parsed into a canonical form and then output - resulting in 1.3.0.0a1. Pre pbr-semver dev versions like 0.10.1.3.g83bef74 will be parsed but output as 0.10.1.dev3.g83bef74. 
:raises ValueError: Never tagged versions sdisted by old pbr result in just the git hash, e.g. '1234567' which poses a substantial problem since they collide with the semver versions when all the digits are numerals. Such versions will result in a ValueError being thrown if any non-numeric digits are present. They are an exception to the general case of accepting anything we ever output, since they were never intended and would permanently mess up versions on PyPI if ever released - we're treating that as a critical bug that we ever made them and have stopped doing that. """ try: return klass._from_pip_string_unsafe(version_string) except IndexError: raise ValueError("Invalid version %r" % version_string) @classmethod def _from_pip_string_unsafe(klass, version_string): # Versions need to start numerically, ignore if not version_string = version_string.lstrip('vV') if not version_string[:1].isdigit(): raise ValueError("Invalid version %r" % version_string) input_components = version_string.split('.') # decimals first (keep pre-release and dev/hashes to the right) components = [c for c in input_components if c.isdigit()] digit_len = len(components) if digit_len == 0: raise ValueError("Invalid version %r" % version_string) elif digit_len < 3: if (digit_len < len(input_components) and input_components[digit_len][0].isdigit()): # Handle X.YaZ - Y is a digit not a leadin to pre-release. mixed_component = input_components[digit_len] last_component = ''.join(itertools.takewhile( lambda x: x.isdigit(), mixed_component)) components.append(last_component) input_components[digit_len:digit_len + 1] = [ last_component, mixed_component[len(last_component):]] digit_len += 1 components.extend([0] * (3 - digit_len)) components.extend(input_components[digit_len:]) major = int(components[0]) minor = int(components[1]) dev_count = None post_count = None prerelease_type = None prerelease = None def _parse_type(segment): # Discard leading digits (the 0 in 0a1) isdigit = operator.methodcaller('isdigit') segment = ''.join(itertools.dropwhile(isdigit, segment)) isalpha = operator.methodcaller('isalpha') prerelease_type = ''.join(itertools.takewhile(isalpha, segment)) prerelease = segment[len(prerelease_type)::] return prerelease_type, int(prerelease) if _is_int(components[2]): patch = int(components[2]) else: # legacy version e.g. 
1.2.0a1 (canonical is 1.2.0.0a1) # or 1.2.dev4.g1234 or 1.2.b4 patch = 0 components[2:2] = [0] remainder = components[3:] remainder_starts_with_int = False try: if remainder and int(remainder[0]): remainder_starts_with_int = True except ValueError: pass if remainder_starts_with_int: # old dev format - 0.1.2.3.g1234 dev_count = int(remainder[0]) else: if remainder and (remainder[0][0] == '0' or remainder[0][0] in ('a', 'b', 'r')): # Current RC/beta layout prerelease_type, prerelease = _parse_type(remainder[0]) remainder = remainder[1:] while remainder: component = remainder[0] if component.startswith('dev'): dev_count = int(component[3:]) elif component.startswith('post'): dev_count = None post_count = int(component[4:]) else: raise ValueError( 'Unknown remainder %r in %r' % (remainder, version_string)) remainder = remainder[1:] result = SemanticVersion( major, minor, patch, prerelease_type=prerelease_type, prerelease=prerelease, dev_count=dev_count) if post_count: if dev_count: raise ValueError( 'Cannot combine postN and devN - no mapping in %r' % (version_string,)) result = result.increment().to_dev(post_count) return result def brief_string(self): """Return the short version minus any alpha/beta tags.""" return "%s.%s.%s" % (self._major, self._minor, self._patch) def debian_string(self): """Return the version number to use when building a debian package. This translates the PEP440/semver precedence rules into Debian version sorting operators. """ return self._long_version("~") def decrement(self): """Return a decremented SemanticVersion. Decrementing versions doesn't make a lot of sense - this method only exists to support rendering of pre-release versions strings into serialisations (such as rpm) with no sort-before operator. The 9999 magic version component is from the spec on this - pbr-semver. :return: A new SemanticVersion object. """ if self._patch: new_patch = self._patch - 1 new_minor = self._minor new_major = self._major else: new_patch = 9999 if self._minor: new_minor = self._minor - 1 new_major = self._major else: new_minor = 9999 if self._major: new_major = self._major - 1 else: new_major = 0 return SemanticVersion( new_major, new_minor, new_patch) def increment(self, minor=False, major=False): """Return an incremented SemanticVersion. The default behaviour is to perform a patch level increment. When incrementing a prerelease version, the patch level is not changed - the prerelease serial is changed (e.g. beta 0 -> beta 1). Incrementing non-pre-release versions will not introduce pre-release versions - except when doing a patch incremental to a pre-release version the new version will only consist of major/minor/patch. :param minor: Increment the minor version. :param major: Increment the major version. :return: A new SemanticVersion object. """ if self._prerelease_type: new_prerelease_type = self._prerelease_type new_prerelease = self._prerelease + 1 new_patch = self._patch else: new_prerelease_type = None new_prerelease = None new_patch = self._patch + 1 if minor: new_minor = self._minor + 1 new_patch = 0 new_prerelease_type = None new_prerelease = None else: new_minor = self._minor if major: new_major = self._major + 1 new_minor = 0 new_patch = 0 new_prerelease_type = None new_prerelease = None else: new_major = self._major return SemanticVersion( new_major, new_minor, new_patch, new_prerelease_type, new_prerelease) def _long_version(self, pre_separator, rc_marker=""): """Construct a long string version of this semver. 
:param pre_separator: What separator to use between components that sort before rather than after. If None, use . and lower the version number of the component to preserve sorting. (Used for rpm support) """ if ((self._prerelease_type or self._dev_count) and pre_separator is None): segments = [self.decrement().brief_string()] pre_separator = "." else: segments = [self.brief_string()] if self._prerelease_type: segments.append( "%s%s%s%s" % (pre_separator, rc_marker, self._prerelease_type, self._prerelease)) if self._dev_count: if not self._prerelease_type: segments.append(pre_separator) else: segments.append('.') segments.append('dev') segments.append(self._dev_count) return "".join(str(s) for s in segments) def release_string(self): """Return the full version of the package. This including suffixes indicating VCS status. """ return self._long_version(".", "0") def rpm_string(self): """Return the version number to use when building an RPM package. This translates the PEP440/semver precedence rules into RPM version sorting operators. Because RPM has no sort-before operator (such as the ~ operator in dpkg), we show all prerelease versions as being versions of the release before. """ return self._long_version(None) def to_dev(self, dev_count): """Return a development version of this semver. :param dev_count: The number of commits since the last release. """ return SemanticVersion( self._major, self._minor, self._patch, self._prerelease_type, self._prerelease, dev_count=dev_count) def version_tuple(self): """Present the version as a version_info tuple. For documentation on version_info tuples see the Python documentation for sys.version_info. Since semver and PEP-440 represent overlapping but not subsets of versions, we have to have some heuristic / mapping rules, and have extended the releaselevel field to have alphadev, betadev and candidatedev values. When they are present the dev count is used to provide the serial. - a/b/rc take precedence. - if there is no pre-release version the dev version is used. - serial is taken from the dev/a/b/c component. - final non-dev versions never get serials. """ segments = [self._major, self._minor, self._patch] if self._prerelease_type: type_map = {('a', False): 'alpha', ('b', False): 'beta', ('rc', False): 'candidate', ('a', True): 'alphadev', ('b', True): 'betadev', ('rc', True): 'candidatedev', } segments.append( type_map[(self._prerelease_type, bool(self._dev_count))]) segments.append(self._dev_count or self._prerelease) elif self._dev_count: segments.append('dev') segments.append(self._dev_count - 1) else: segments.append('final') segments.append(0) return tuple(segments) class VersionInfo(object): def __init__(self, package): """Object that understands versioning for a package :param package: name of the python package, such as glance, or python-glanceclient """ self.package = package self.version = None self._cached_version = None self._semantic = None def __str__(self): """Make the VersionInfo object behave like a string.""" return self.version_string() def __repr__(self): """Include the name.""" return "pbr.version.VersionInfo(%s:%s)" % ( self.package, self.version_string()) def _get_version_from_pkg_resources(self): """Obtain a version from pkg_resources or setup-time logic if missing. 
This will try to get the version of the package from the pkg_resources record associated with the package, and if there is no such record falls back to the logic sdist would use. """ import pkg_resources try: requirement = pkg_resources.Requirement.parse(self.package) provider = pkg_resources.get_provider(requirement) result_string = provider.version except pkg_resources.DistributionNotFound: # The most likely cause for this is running tests in a tree # produced from a tarball where the package itself has not been # installed into anything. Revert to setup-time logic. from pbr import packaging result_string = packaging.get_version(self.package) return SemanticVersion.from_pip_string(result_string) def _get_version_from_importlib_metadata(self): """Obtain a version from importlib or setup-time logic if missing. This will try to get the version of the package from the importlib_metadata record associated with the package, and if there is no such record falls back to the logic sdist would use. """ try: distribution = importlib_metadata.distribution(self.package) result_string = distribution.version except importlib_metadata.PackageNotFoundError: # The most likely cause for this is running tests in a tree # produced from a tarball where the package itself has not been # installed into anything. Revert to setup-time logic. from pbr import packaging result_string = packaging.get_version(self.package) return SemanticVersion.from_pip_string(result_string) def release_string(self): """Return the full version of the package. This includes suffixes indicating VCS status. """ return self.semantic_version().release_string() def semantic_version(self): """Return the SemanticVersion object for this version.""" if self._semantic is None: # TODO(damami): simplify this once Python 3.8 is the oldest # we support if use_importlib: self._semantic = self._get_version_from_importlib_metadata() else: self._semantic = self._get_version_from_pkg_resources() return self._semantic def version_string(self): """Return the short version minus any alpha/beta tags.""" return self.semantic_version().brief_string() # Compatibility functions canonical_version_string = version_string version_string_with_vcs = release_string def cached_version_string(self, prefix=""): """Return a cached version string. This will return a cached version string if one is already cached, irrespective of prefix. If none is cached, one will be created with prefix and then cached and returned.
""" if not self._cached_version: self._cached_version = "%s%s" % (prefix, self.version_string()) return self._cached_version ././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1673431461.8979604 pbr-5.11.1/pbr.egg-info/0000775000175000017500000000000000000000000014662 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1673431461.0 pbr-5.11.1/pbr.egg-info/PKG-INFO0000664000175000017500000000243700000000000015765 0ustar00zuulzuul00000000000000Metadata-Version: 2.1 Name: pbr Version: 5.11.1 Summary: Python Build Reasonableness Home-page: https://docs.openstack.org/pbr/latest/ Author: OpenStack Author-email: openstack-discuss@lists.openstack.org License: UNKNOWN Project-URL: Bug Tracker, https://bugs.launchpad.net/pbr/ Project-URL: Documentation, https://docs.openstack.org/pbr/ Project-URL: Source Code, https://opendev.org/openstack/pbr Description: Python Build Reasonableness Platform: UNKNOWN Classifier: Development Status :: 5 - Production/Stable Classifier: Environment :: Console Classifier: Environment :: OpenStack Classifier: Intended Audience :: Developers Classifier: Intended Audience :: Information Technology Classifier: License :: OSI Approved :: Apache Software License Classifier: Operating System :: OS Independent Classifier: Programming Language :: Python Classifier: Programming Language :: Python :: 2 Classifier: Programming Language :: Python :: 2.7 Classifier: Programming Language :: Python :: 3 Classifier: Programming Language :: Python :: 3.5 Classifier: Programming Language :: Python :: 3.6 Classifier: Programming Language :: Python :: 3.7 Classifier: Programming Language :: Python :: 3.8 Classifier: Programming Language :: Python :: 3.9 Requires-Python: >=2.6 Description-Content-Type: text/x-rst; charset=UTF-8 ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1673431461.0 pbr-5.11.1/pbr.egg-info/SOURCES.txt0000664000175000017500000000730300000000000016551 0ustar00zuulzuul00000000000000.coveragerc .mailmap .pre-commit-config.yaml .stestr.conf .zuul.yaml AUTHORS CONTRIBUTING.rst ChangeLog LICENSE README.rst pyproject.toml.future setup.cfg setup.py test-requirements.txt tox.ini doc/requirements.txt doc/source/conf.py doc/source/index.rst doc/source/contributor/index.rst doc/source/reference/index.rst doc/source/user/compatibility.rst doc/source/user/features.rst doc/source/user/history.rst doc/source/user/index.rst doc/source/user/packagers.rst doc/source/user/releasenotes.rst doc/source/user/semver.rst doc/source/user/using.rst pbr/__init__.py pbr/build.py pbr/builddoc.py pbr/core.py pbr/extra_files.py pbr/find_package.py pbr/git.py pbr/options.py pbr/packaging.py pbr/pbr_json.py pbr/sphinxext.py pbr/testr_command.py pbr/util.py pbr/version.py pbr.egg-info/PKG-INFO pbr.egg-info/SOURCES.txt pbr.egg-info/dependency_links.txt pbr.egg-info/entry_points.txt pbr.egg-info/not-zip-safe pbr.egg-info/top_level.txt pbr/cmd/__init__.py pbr/cmd/main.py pbr/hooks/__init__.py pbr/hooks/backwards.py pbr/hooks/base.py pbr/hooks/commands.py pbr/hooks/files.py pbr/hooks/metadata.py pbr/tests/__init__.py pbr/tests/base.py pbr/tests/test_commands.py pbr/tests/test_core.py pbr/tests/test_files.py pbr/tests/test_hooks.py pbr/tests/test_integration.py pbr/tests/test_packaging.py pbr/tests/test_pbr_json.py pbr/tests/test_setup.py pbr/tests/test_util.py pbr/tests/test_version.py pbr/tests/test_wsgi.py pbr/tests/util.py 
pbr/tests/testpackage/CHANGES.txt pbr/tests/testpackage/LICENSE.txt pbr/tests/testpackage/MANIFEST.in pbr/tests/testpackage/README.txt pbr/tests/testpackage/extra-file.txt pbr/tests/testpackage/git-extra-file.txt pbr/tests/testpackage/setup.cfg pbr/tests/testpackage/setup.py pbr/tests/testpackage/test-requirements.txt pbr/tests/testpackage/data_files/a.txt pbr/tests/testpackage/data_files/b.txt pbr/tests/testpackage/data_files/c.rst pbr/tests/testpackage/doc/source/conf.py pbr/tests/testpackage/doc/source/index.rst pbr/tests/testpackage/doc/source/installation.rst pbr/tests/testpackage/doc/source/usage.rst pbr/tests/testpackage/pbr_testpackage/__init__.py pbr/tests/testpackage/pbr_testpackage/_setup_hooks.py pbr/tests/testpackage/pbr_testpackage/cmd.py pbr/tests/testpackage/pbr_testpackage/extra.py pbr/tests/testpackage/pbr_testpackage/wsgi.py pbr/tests/testpackage/pbr_testpackage/package_data/1.txt pbr/tests/testpackage/pbr_testpackage/package_data/2.txt pbr/tests/testpackage/src/testext.c playbooks/pbr-installation-openstack/pre.yaml playbooks/pbr-installation-openstack/run.yaml releasenotes/notes/bdist_wininst-removal-4a1c7c3a9f08238d.yaml releasenotes/notes/cmd-e6664dcbd42d3935.yaml releasenotes/notes/deprecate-pyN-requirements-364655c38fa5b780.yaml releasenotes/notes/deprecate-testr-nose-integration-56e3e11248d946fc.yaml releasenotes/notes/fix-global-replace-of-src-prefix-in-glob-eb850b94ca96993e.yaml releasenotes/notes/fix-handling-of-spaces-in-data-files-glob-0fe0c398d70dfea8.yaml releasenotes/notes/fix-keywords-as-cfg-list-6cadc5141429d7f5.yaml releasenotes/notes/fix-mapping-value-explode-with-equal-sign-41bf822fa4dd0e68.yaml releasenotes/notes/fix-pep517-metadata-regression-bc287e60e45b2732.yaml releasenotes/notes/fix-symbols-leading-spaces-f68928d75a8f0997.yaml releasenotes/notes/ignore-find-links-07cf54f465aa33a6.yaml releasenotes/notes/long-descr-content-type-f9a1003acbb8740f.yaml releasenotes/notes/pep517-support-89189ce0bab15845.yaml releasenotes/notes/remove-command-hooks-907d9c2325f306ca.yaml releasenotes/notes/support-vcs-uris-with-subdirectories-20ad68b6138f72ca.yaml releasenotes/notes/use_2to3-removal-ac48bf9fbfa049b1.yaml releasenotes/notes/v_version-457b38c8679c5868.yaml releasenotes/source/conf.py releasenotes/source/index.rst releasenotes/source/unreleased.rst tools/integration.sh././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1673431461.0 pbr-5.11.1/pbr.egg-info/dependency_links.txt0000664000175000017500000000000100000000000020730 0ustar00zuulzuul00000000000000 ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1673431461.0 pbr-5.11.1/pbr.egg-info/entry_points.txt0000664000175000017500000000022500000000000020157 0ustar00zuulzuul00000000000000[console_scripts] pbr = pbr.cmd.main:main [distutils.setup_keywords] pbr = pbr.core:pbr [egg_info.writers] pbr.json = pbr.pbr_json:write_pbr_json ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1673431461.0 pbr-5.11.1/pbr.egg-info/not-zip-safe0000664000175000017500000000000100000000000017110 0ustar00zuulzuul00000000000000 ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1673431461.0 pbr-5.11.1/pbr.egg-info/top_level.txt0000664000175000017500000000000400000000000017406 0ustar00zuulzuul00000000000000pbr ././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1673431461.8859599 
pbr-5.11.1/playbooks/0000775000175000017500000000000000000000000014410 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1673431461.9059608 pbr-5.11.1/playbooks/pbr-installation-openstack/0000775000175000017500000000000000000000000021657 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1673431436.0 pbr-5.11.1/playbooks/pbr-installation-openstack/pre.yaml0000664000175000017500000000007700000000000023335 0ustar00zuulzuul00000000000000- hosts: all roles: - ensure-pip - ensure-virtualenv ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1673431436.0 pbr-5.11.1/playbooks/pbr-installation-openstack/run.yaml0000664000175000017500000000040200000000000023343 0ustar00zuulzuul00000000000000- hosts: all tasks: - shell: cmd: | export PBR_PIP_VERSION="{{ pbr_pip_version }}" bash -xe /home/zuul/src/opendev.org/openstack/pbr/tools/integration.sh $(cat /home/zuul/src/opendev.org/openstack/requirements/projects.txt) ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1673431436.0 pbr-5.11.1/pyproject.toml.future0000664000175000017500000000057100000000000016635 0ustar00zuulzuul00000000000000# PBR doesn't use the pyproject.toml interface internally yet as # fixing issues in the system will be difficult if PBR itself # depends on it. We will put this file into place at pyproject.toml # once we are more confident it works generally. [build-system] requires = ["setuptools>=36.6.0"] build-backend = "pbr.build" backend-path = ["."] [tools.setuptools] py-modules=[] ././@PaxHeader0000000000000000000000000000003200000000000011450 xustar000000000000000026 mtime=1673431461.88996 pbr-5.11.1/releasenotes/0000775000175000017500000000000000000000000015076 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000003300000000000011451 xustar000000000000000027 mtime=1673431461.909961 pbr-5.11.1/releasenotes/notes/0000775000175000017500000000000000000000000016226 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1673431436.0 pbr-5.11.1/releasenotes/notes/bdist_wininst-removal-4a1c7c3a9f08238d.yaml0000664000175000017500000000055600000000000025657 0ustar00zuulzuul00000000000000--- other: - | Support to generate bdist_wininst packages has been removed. As of Python 3.8 and Setuptools 47.2 it's deprecated in favor of just using wheels for Windows platform packaging. See https://discuss.python.org/t/deprecate-bdist-wininst/ and https://discuss.python.org/t/remove-distutils-bdist-wininst-command/ for more details. ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1673431436.0 pbr-5.11.1/releasenotes/notes/cmd-e6664dcbd42d3935.yaml0000664000175000017500000000024200000000000022076 0ustar00zuulzuul00000000000000--- features: - | Add an option to print only the version of a package Example: .. code-block:: bash $ pbr info -s pkgname 1.2.3 ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1673431436.0 pbr-5.11.1/releasenotes/notes/deprecate-pyN-requirements-364655c38fa5b780.yaml0000664000175000017500000000022000000000000026412 0ustar00zuulzuul00000000000000--- deprecations: - | Support for ``pyN``-suffixed requirement files has been deprecated: environment markers should be used instead. 
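As a sketch of the replacement recommended in the deprecation note above (the package name and version pin are hypothetical, chosen only for illustration), a single ``requirements.txt`` entry with an environment marker takes the place of a separate ``pyN``-suffixed file:

.. code-block:: text

   # formerly kept in a requirements-py2.txt read only by Python 2 installs
   futures>=3.0; python_version=='2.7'

The marker syntax is the same one already used elsewhere in this tree, for example the ``ordereddict;python_version=='2.6'`` line in the test package's ``test-requirements.txt``.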
././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1673431436.0 pbr-5.11.1/releasenotes/notes/deprecate-testr-nose-integration-56e3e11248d946fc.yaml0000664000175000017500000000076100000000000027641 0ustar00zuulzuul00000000000000--- deprecations: - | *testr* and *nose* integration has been deprecated. This feature allowed *pbr* to dynamically configure the test runner used when running ``setup.py test``. However, this target has fallen out of favour in both the OpenStack and broader Python ecosystem, and both *testr* and *nose* offer native setuptools commands that can be manually aliased to ``test`` on a per-project basis, if necessary. This feature will be removed in a future release. ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1673431436.0 pbr-5.11.1/releasenotes/notes/fix-global-replace-of-src-prefix-in-glob-eb850b94ca96993e.yaml0000664000175000017500000000066500000000000031022 0ustar00zuulzuul00000000000000--- fixes: - | Fixes a bug where the directory names of items specified in ``data_files`` could be renamed if the source prefix glob was contained within the directory name. See `bug 1810804 `_ for details. For more information on ``data_files``, see the `distutils documentation `_. ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1673431436.0 pbr-5.11.1/releasenotes/notes/fix-handling-of-spaces-in-data-files-glob-0fe0c398d70dfea8.yaml0000664000175000017500000000024500000000000031257 0ustar00zuulzuul00000000000000--- fixes: - | Fixes the handling of spaces in data_files globs. Please see `bug 1810934 `_ for more details. ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1673431436.0 pbr-5.11.1/releasenotes/notes/fix-keywords-as-cfg-list-6cadc5141429d7f5.yaml0000664000175000017500000000041200000000000026067 0ustar00zuulzuul00000000000000--- fixes: - | Fix error when ``keywords`` argument as a cfg list. Previously ``keywords`` were ``CSV_FIELDS`` and with these changes ``keywords`` are now ``MULTI_FIELDS``. Refer to https://bugs.launchpad.net/pbr/+bug/1811475 for more information. ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1673431436.0 pbr-5.11.1/releasenotes/notes/fix-mapping-value-explode-with-equal-sign-41bf822fa4dd0e68.yaml0000664000175000017500000000043400000000000031412 0ustar00zuulzuul00000000000000--- fixes: - | Fix mapping error on values who contains a literal ``=``. Example when setup.cfg contains content like the following project urls configuration "project_urls = Documentation = http://foo.bar/?badge=latest". https://bugs.launchpad.net/pbr/+bug/1817592 ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1673431436.0 pbr-5.11.1/releasenotes/notes/fix-pep517-metadata-regression-bc287e60e45b2732.yaml0000664000175000017500000000035400000000000027011 0ustar00zuulzuul00000000000000--- fixes: - | Packages generated with the 5.8.0 release of PBR failed to incorporate pbr.json metadata files. This is now corrected. Users can rebuild packages with newer PBR if they want that missing metadata generated. 
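To make the two configuration shapes referenced in the fixes above concrete, here is a minimal ``setup.cfg`` fragment (the values are illustrative only): ``keywords`` supplied as a multi-line list, and a ``project_urls`` entry whose value contains a literal ``=``:

.. code-block:: ini

   [metadata]
   keywords =
       packaging
       versioning
   project_urls =
       Documentation = http://foo.bar/?badge=latest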
././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1673431436.0 pbr-5.11.1/releasenotes/notes/fix-symbols-leading-spaces-f68928d75a8f0997.yaml0000664000175000017500000000015200000000000026355 0ustar00zuulzuul00000000000000--- fixes: - | Fix an issue where symbols that were indented would produce an incorrect version.././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1673431436.0 pbr-5.11.1/releasenotes/notes/ignore-find-links-07cf54f465aa33a6.yaml0000664000175000017500000000027600000000000024650 0ustar00zuulzuul00000000000000--- fixes: - | PBR now ignores ``--find-links`` in requirements files. This option is not a valid ``install_requires`` entry for setuptools and thus breaks PBR-based installs. ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1673431436.0 pbr-5.11.1/releasenotes/notes/long-descr-content-type-f9a1003acbb8740f.yaml0000664000175000017500000000025400000000000026061 0ustar00zuulzuul00000000000000--- fixes: - | The ``description-content-type`` was not being set correctly. It will now be correctly populated when using ``setuptools`` 39.2.0 and beyond. ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1673431436.0 pbr-5.11.1/releasenotes/notes/pep517-support-89189ce0bab15845.yaml0000664000175000017500000000036400000000000024014 0ustar00zuulzuul00000000000000--- features: - | PBR now includes a PEP 517 build-backend and can be used in pyproject.toml build-system configuration. Setuptools continues to be the underlying mechanism with PBR acting as a driver via PEP 517 entrypoints. ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1673431436.0 pbr-5.11.1/releasenotes/notes/remove-command-hooks-907d9c2325f306ca.yaml0000664000175000017500000000032500000000000025300 0ustar00zuulzuul00000000000000--- upgrade: - | Support for entry point command hooks has been removed. This feature was poorly tested, poorly documented, and broken in some environments. Support for global hooks is not affected. ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1673431436.0 pbr-5.11.1/releasenotes/notes/support-vcs-uris-with-subdirectories-20ad68b6138f72ca.yaml0000664000175000017500000000057300000000000030612 0ustar00zuulzuul00000000000000--- features: - | Subdirectories can now be included when specfifying a requirement in ``requirements.txt`` or ``test-requirements.txt`` using VCS URIs. For example: -e git+https://foo.com/zipball#egg=bar&subdirectory=baz For more information, refer to the `pip documentation `__. ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1673431436.0 pbr-5.11.1/releasenotes/notes/use_2to3-removal-ac48bf9fbfa049b1.yaml0000664000175000017500000000067200000000000024662 0ustar00zuulzuul00000000000000--- other: - | The 2to3 conversion utility has been long discouraged in favor of writing multi-version-capable scripts. As of Setuptools 46.2.0 it's deprecated and slated for removal from the Python 3.10 standard library. Projects which still need it are encouraged to perform conversion prior to packaging. See https://bugs.python.org/issue40360 and https://github.com/pypa/setuptools/issues/2086 for more details. 
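The PEP 517 note above can be read together with the ``pyproject.toml.future`` file shipped earlier in this archive. A sketch of how a consuming project might point its build system at the pbr backend follows; the version pins are illustrative, not mandated:

.. code-block:: toml

   [build-system]
   requires = ["pbr>=5.8.0", "setuptools>=36.6.0"]
   build-backend = "pbr.build"

Unlike pbr's own ``pyproject.toml.future``, a downstream project would not need ``backend-path``, since it consumes the backend from the installed ``pbr`` distribution rather than from the source tree.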
././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1673431436.0 pbr-5.11.1/releasenotes/notes/v_version-457b38c8679c5868.yaml0000664000175000017500000000021700000000000023155 0ustar00zuulzuul00000000000000--- features: - | Support version parsing of git tag with the ``v`` pattern (or ``V``), in addition to ````. ././@PaxHeader0000000000000000000000000000003300000000000011451 xustar000000000000000027 mtime=1673431461.909961 pbr-5.11.1/releasenotes/source/0000775000175000017500000000000000000000000016376 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1673431436.0 pbr-5.11.1/releasenotes/source/conf.py0000664000175000017500000000333500000000000017701 0ustar00zuulzuul00000000000000# -*- coding: utf-8 -*- # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. # pbr Release Notes documentation build configuration file # -- General configuration ------------------------------------------------ # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. extensions = [ 'openstackdocstheme', 'reno.sphinxext', ] # The master toctree document. master_doc = 'index' # Release notes are version independent # The short X.Y version. version = '' # The full version, including alpha/beta/rc tags. release = '' # -- Options for HTML output ---------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. html_theme = 'openstackdocs' # -- Options for openstackdocstheme --------------------------------------- # Deprecated options for openstackdocstheme < 2.2.0, can be removed once # pbr stops supporting py27. repository_name = 'openstack/pbr' bug_project = 'pbr' bug_tag = '' # New options with openstackdocstheme >=2.2.0 openstackdocs_repo_name = 'openstack/pbr' openstackdocs_auto_name = False openstackdocs_bug_project = 'pbr' openstackdocs_bug_tag = '' ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1673431436.0 pbr-5.11.1/releasenotes/source/index.rst0000664000175000017500000000014300000000000020235 0ustar00zuulzuul00000000000000================= pbr Release Notes ================= .. toctree:: :maxdepth: 1 unreleased ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1673431436.0 pbr-5.11.1/releasenotes/source/unreleased.rst0000664000175000017500000000015300000000000021256 0ustar00zuulzuul00000000000000============================ Current Series Release Notes ============================ .. 
release-notes:: ././@PaxHeader0000000000000000000000000000003300000000000011451 xustar000000000000000027 mtime=1673431461.909961 pbr-5.11.1/setup.cfg0000664000175000017500000000250500000000000014230 0ustar00zuulzuul00000000000000[metadata] name = pbr author = OpenStack author_email = openstack-discuss@lists.openstack.org description = Python Build Reasonableness long_description = file: README.rst long_description_content_type = text/x-rst; charset=UTF-8 url = https://docs.openstack.org/pbr/latest/ project_urls = Bug Tracker = https://bugs.launchpad.net/pbr/ Documentation = https://docs.openstack.org/pbr/ Source Code = https://opendev.org/openstack/pbr classifiers = Development Status :: 5 - Production/Stable Environment :: Console Environment :: OpenStack Intended Audience :: Developers Intended Audience :: Information Technology License :: OSI Approved :: Apache Software License Operating System :: OS Independent Programming Language :: Python Programming Language :: Python :: 2 Programming Language :: Python :: 2.7 Programming Language :: Python :: 3 Programming Language :: Python :: 3.5 Programming Language :: Python :: 3.6 Programming Language :: Python :: 3.7 Programming Language :: Python :: 3.8 Programming Language :: Python :: 3.9 [options] python_requires = >=2.6 [files] packages = pbr [entry_points] distutils.setup_keywords = pbr = pbr.core:pbr egg_info.writers = pbr.json = pbr.pbr_json:write_pbr_json console_scripts = pbr = pbr.cmd.main:main [bdist_wheel] universal = 1 [egg_info] tag_build = tag_date = 0 ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1673431436.0 pbr-5.11.1/setup.py0000664000175000017500000000127000000000000014117 0ustar00zuulzuul00000000000000# Copyright (c) 2013 Hewlett-Packard Development Company, L.P. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. import setuptools from pbr import util setuptools.setup( **util.cfg_to_args()) ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1673431436.0 pbr-5.11.1/test-requirements.txt0000664000175000017500000000160200000000000016645 0ustar00zuulzuul00000000000000# The order of packages is significant, because pip processes them in the order # of appearance. Changing the order has an impact on the overall integration # process, which may cause wedges in the gate later. 
wheel>=0.32.0 # MIT fixtures>=3.0.0 # Apache-2.0/BSD hacking>=1.1.0,<4.0.0;python_version>='3.6' # Apache-2.0 mock>=2.0.0,<4.0.0;python_version=='2.7' # BSD six>=1.12.0 # MIT stestr>=2.1.0,<3.0;python_version=='2.7' # Apache-2.0 stestr>=2.1.0;python_version>='3.0' # Apache-2.0 testresources>=2.0.0 # Apache-2.0/BSD testscenarios>=0.4 # Apache-2.0/BSD testtools>=2.2.0 # MIT virtualenv>=20.0.3 # MIT coverage!=4.4,>=4.0 # Apache-2.0 # optionally exposed by distutils commands sphinx!=1.6.6,!=1.6.7,>=1.6.2,<2.0.0;python_version=='2.7' # BSD sphinx!=1.6.6,!=1.6.7,>=1.6.2;python_version>='3.4' # BSD testrepository>=0.0.18 # Apache-2.0/BSD pre-commit>=2.6.0;python_version>='3.6' # MIT ././@PaxHeader0000000000000000000000000000003300000000000011451 xustar000000000000000027 mtime=1673431461.909961 pbr-5.11.1/tools/0000775000175000017500000000000000000000000013545 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1673431436.0 pbr-5.11.1/tools/integration.sh0000664000175000017500000001124200000000000016424 0ustar00zuulzuul00000000000000#!/bin/bash -xe # Parameters: # PBR_PIP_VERSION :- if not set, run pip's latest release, if set must be a # valid reference file entry describing what pip to use. # WHEELHOUSE :- if not set, use a temporary wheelhouse, set to a specific path # to use an existing one. # PIPFLAGS :- may be set to any pip global option for e.g. debugging. # Bootstrappping the mkenv needs to install *a* pip export PIPVERSION=pip PIPFLAGS=${PIPFLAGS:-} function mkvenv { venv=$1 rm -rf $venv virtualenv -p python3 $venv $venv/bin/pip install $PIPFLAGS -U $PIPVERSION wheel requests # If a change to PBR is being tested, preinstall the wheel for it if [ -n "$PBR_CHANGE" ] ; then $venv/bin/pip install $PIPFLAGS $pbrsdistdir/dist/pbr-*.whl fi } # BASE should be a directory with a subdir called "openstack" and in that # dir, there should be a git repository for every entry in PROJECTS BASE=${BASE:-/home/zuul/src/opendev.org/} REPODIR=${REPODIR:-$BASE/openstack} # TODO: Figure out how to get this on to the box properly sudo apt-get update sudo apt-get install -y --force-yes libvirt-dev libxml2-dev libxslt-dev libmysqlclient-dev libpq-dev libnspr4-dev pkg-config libsqlite3-dev libffi-dev libldap2-dev libsasl2-dev ccache libkrb5-dev liberasurecode-dev libjpeg-dev libsystemd-dev libnss3-dev libssl-dev # FOR pyyaml sudo apt-get install -y --force-yes debhelper python3-all-dev python3-all-dbg libyaml-dev cython3 cython3-dbg quilt # And use ccache explitly export PATH=/usr/lib/ccache:$PATH tmpdir=$(mktemp -d) # Set up a wheelhouse export WHEELHOUSE=${WHEELHOUSE:-$tmpdir/.wheelhouse} mkvenv $tmpdir/wheelhouse # Specific PIP version - must succeed to be useful. # - build/download a local wheel so we don't hit the network on each venv. if [ -n "${PBR_PIP_VERSION:-}" ]; then td=$(mktemp -d) $tmpdir/wheelhouse/bin/pip wheel -w $td $PBR_PIP_VERSION # This version will now be installed in every new venv. export PIPVERSION="$td/$(ls $td)" $tmpdir/wheelhouse/bin/pip install -U $PIPVERSION # We have pip in global-requirements as open-ended requirements, # but since we don't use -U in any other invocations, our version # of pip should be sticky. fi # Build wheels for everything so we don't hit the network on each venv. # Not all packages properly build wheels (httpretty for example). # Do our best but ignore errors when making wheels. 
set +e
$tmpdir/wheelhouse/bin/pip $PIPFLAGS wheel -w $WHEELHOUSE -f $WHEELHOUSE -r \
    $REPODIR/requirements/global-requirements.txt
set -e

#BRANCH
BRANCH=${OVERRIDE_ZUUL_BRANCH:-master}
# PROJECTS is a list of projects that we're testing
PROJECTS=$*

pbrsdistdir=$tmpdir/pbrsdist
git clone $REPODIR/pbr $pbrsdistdir
cd $pbrsdistdir

# Prepare a wheel and flag whether a change to PBR is being tested
if git fetch $ZUUL_URL/$ZUUL_PROJECT $ZUUL_REF ; then
    mkvenv wheel
    wheel/bin/python setup.py bdist_wheel
    PBR_CHANGE=1
fi

eptest=$tmpdir/eptest
mkdir $eptest
cd $eptest

cat <<EOF > setup.cfg
[metadata]
name = test_project

[entry_points]
console_scripts =
    test_cmd = test_project:main
EOF

cat <<EOF > setup.py
import setuptools

from requests import Timeout
from socket import error as SocketError

# Some environments have network issues that drop connections to pypi
# when running integration tests, so we retry here so that hour-long
# test runs are less likely to fail randomly.
try:
    setuptools.setup(
        setup_requires=['pbr'],
        pbr=True,
    )
except (SocketError, Timeout):
    setuptools.setup(
        setup_requires=['pbr'],
        pbr=True,
    )
EOF

mkdir test_project
cat <<EOF > test_project/__init__.py
def main():
    print("Test cmd")
EOF

epvenv=$eptest/venv
mkvenv $epvenv

eppbrdir=$tmpdir/eppbrdir
git clone $REPODIR/pbr $eppbrdir
$epvenv/bin/pip $PIPFLAGS install -f $WHEELHOUSE -e $eppbrdir

# First check develop
PBR_VERSION=0.0 $epvenv/bin/python setup.py develop
cat $epvenv/bin/test_cmd
grep 'PBR Generated' $epvenv/bin/test_cmd
PBR_VERSION=0.0 $epvenv/bin/python setup.py develop --uninstall

# Now check install
PBR_VERSION=0.0 $epvenv/bin/python setup.py install
cat $epvenv/bin/test_cmd
grep 'PBR Generated' $epvenv/bin/test_cmd
$epvenv/bin/test_cmd | grep 'Test cmd'

projectdir=$tmpdir/projects
mkdir -p $projectdir

sudo chown -R $USER $REPODIR

export PBR_INTEGRATION=1
export PIPFLAGS
export PIPVERSION
PBRVERSION=pbr
if [ -n "$PBR_CHANGE" ] ; then
    PBRVERSION=$(ls $pbrsdistdir/dist/pbr-*.whl)
fi
export PBRVERSION
export PROJECTS
export REPODIR
export WHEELHOUSE
export OS_TEST_TIMEOUT=1200

cd $REPODIR/pbr
mkvenv .venv
source .venv/bin/activate
pip install -r test-requirements.txt
pip install ${REPODIR}/requirements
stestr run --suppress-attachments test_integration

././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1673431436.0
pbr-5.11.1/tox.ini0000664000175000017500000000333200000000000013721 0ustar00zuulzuul00000000000000[tox]
minversion = 3.18.0
envlist = pep8,py3,docs

[testenv]
usedevelop = true
passenv =
    PBR_INTEGRATION
    PIPFLAGS
    PIPVERSION
    PBRVERSION
    REPODIR
    WHEELHOUSE
    PROJECTS
# TODO(fungi): drop distutils override once logging improves in Setuptools
# https://github.com/pypa/setuptools/issues/3038
setenv =
    SETUPTOOLS_USE_DISTUTILS=stdlib
    OS_STDOUT_CAPTURE={env:OS_STDOUT_CAPTURE:1}
    OS_STDERR_CAPTURE={env:OS_STDERR_CAPTURE:1}
    OS_TEST_TIMEOUT={env:OS_TEST_TIMEOUT:60}
# NOTE(stephenfin): pbr intentionally does not use constraints since we support
# a broader range of Python versions than OpenStack as a whole
deps =
    -r{toxinidir}/test-requirements.txt
commands =
    stestr run --suppress-attachments {posargs}

[testenv:pep8]
commands =
    pre-commit run -a

[testenv:docs]
allowlist_externals =
    rm
deps = -r{toxinidir}/doc/requirements.txt
commands =
    rm -rf doc/build doc/source/reference/api
    python setup.py sdist
    sphinx-build -W -b html doc/source doc/build/html {posargs}

[testenv:releasenotes]
allowlist_externals =
    rm
deps = {[testenv:docs]deps}
commands =
    rm -rf releasenotes/build
    sphinx-build -W -b html -d releasenotes/build/doctrees releasenotes/source releasenotes/build/html

[testenv:venv]
commands = {posargs}

[testenv:cover]
# TODO(fungi): drop distutils override once logging improves in Setuptools
# https://github.com/pypa/setuptools/issues/3038
setenv =
    SETUPTOOLS_USE_DISTUTILS=stdlib
    PYTHON=coverage run --source pbr --parallel-mode
commands =
    stestr run {posargs}
    coverage combine
    coverage html -d cover
    coverage xml -o cover/coverage.xml

[flake8]
# W504 (you have to choose this or W503)
ignore = W504
exclude = .venv,.tox,dist,doc,*.egg,build
show-source = true
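
The integration script above builds a throwaway test_project whose packaging is delegated entirely to pbr. As a minimal consumer-side sketch of that same pattern (the project name test_project simply mirrors the script; nothing beyond setup_requires and the pbr keyword is assumed), a downstream setup.py only needs to request pbr at build time and enable the pbr flag, which triggers the distutils.setup_keywords entry point (pbr = pbr.core:pbr) registered in the setup.cfg shown earlier:

    # Sketch of a pbr-consuming setup.py, following the pattern the integration
    # script writes into its generated test project.
    import setuptools

    setuptools.setup(
        # Pull pbr in at build time; the pbr.core:pbr hook then reads the
        # project's own setup.cfg (name, entry_points, ...) for metadata.
        setup_requires=['pbr'],
        pbr=True,
    )

The PBR_VERSION=0.0 prefix used for the develop and install checks is pbr's environment override for version detection, which lets the generated project build without any git history of its own.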