pax_global_header00006660000000000000000000000064144520230330014506gustar00rootroot0000000000000052 comment=28c613dbef5fce09dc3ba6b1baa811c2d76b2245 python-versioneer-0.29/000077500000000000000000000000001445202303300151405ustar00rootroot00000000000000python-versioneer-0.29/.gitattributes000066400000000000000000000000311445202303300200250ustar00rootroot00000000000000_version.py export-subst python-versioneer-0.29/.github/000077500000000000000000000000001445202303300165005ustar00rootroot00000000000000python-versioneer-0.29/.github/dependabot.yml000066400000000000000000000003711445202303300213310ustar00rootroot00000000000000# https://docs.github.com/en/code-security/dependabot/dependabot-version-updates/configuration-options-for-the-dependabot.yml-file version: 2 updates: - package-ecosystem: "github-actions" directory: "/" schedule: interval: "weekly" python-versioneer-0.29/.github/workflows/000077500000000000000000000000001445202303300205355ustar00rootroot00000000000000python-versioneer-0.29/.github/workflows/tox.yml000066400000000000000000000063601445202303300220770ustar00rootroot00000000000000name: Tox on: push: branches: - master - maint/* tags: - '*' pull_request: branches: - master - maint/* schedule: - cron: '0 0 * * MON' jobs: test: runs-on: ${{ matrix.os }} strategy: matrix: os: ['ubuntu-latest'] python-version: ['3.7', '3.8', '3.9', '3.10', '3.11', 'pypy3.9'] shell: ['bash'] include: - os: 'windows-latest' python-version: '3.10' shell: 'bash' - os: 'windows-latest' python-version: '3.10' shell: 'pwsh' - os: 'windows-latest' python-version: '3.10' shell: 'cmd' defaults: run: shell: ${{ matrix.shell }} steps: - uses: actions/checkout@v3 - name: Set up Python ${{ matrix.python-version }} uses: actions/setup-python@v4 with: python-version: ${{ matrix.python-version }} - name: Display Python version run: python -c "import sys; print(sys.version)" - name: Install Tox run: | python -m pip install --upgrade pip pip install tox tox-gh-actions - name: Test run: tox env: PLATFORM: ${{ matrix.os }} bootstrap: # Couldn't figure out how to do this in tox # Need to make sure that versioneer can build itself from source without fetching # old versioneer wheels from PyPI runs-on: ubuntu-latest defaults: run: shell: 'bash' steps: - uses: actions/checkout@v3 with: fetch-depth: 0 - uses: actions/setup-python@v4 with: python-version: 3 - run: "pip install --no-binary :all: ." 
build: runs-on: ubuntu-latest defaults: run: shell: 'bash' steps: - uses: actions/checkout@v3 with: fetch-depth: 0 - uses: actions/setup-python@v4 with: python-version: 3 - run: pip install --upgrade build twine - run: python -m build - run: twine check dist/* - uses: actions/upload-artifact@v3 with: name: dist path: dist/ test-sdist: runs-on: ${{ matrix.os }} needs: [build] strategy: matrix: os: ['ubuntu-latest'] steps: - uses: actions/download-artifact@v3 with: name: dist path: dist/ - name: Unpack sdist run: tar xfvz dist/versioneer-*.tar.gz - uses: actions/setup-python@v4 with: python-version: 3 - name: Display Python version run: python -c "import sys; print(sys.version)" - name: Install Tox run: | python -m pip install --upgrade pip pip install tox tox-gh-actions - name: Test run: cd versioneer-* && tox env: PLATFORM: ${{ matrix.os }} publish: runs-on: ubuntu-latest needs: [bootstrap, test, test-sdist] if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/') steps: - uses: actions/download-artifact@v3 with: name: dist path: dist/ - uses: pypa/gh-action-pypi-publish@release/v1 with: user: __token__ password: ${{ secrets.PYPI_API_TOKEN }} python-versioneer-0.29/.gitignore000066400000000000000000000001671445202303300171340ustar00rootroot00000000000000*.pyc /_test/ /build/ /versioneer.py /git_version.py /MANIFEST /dist/ /versioneer.egg-info/ /.idea *.iml /.tox/ .*.swp python-versioneer-0.29/.mailmap000066400000000000000000000003211445202303300165550ustar00rootroot00000000000000Christopher J. Markiewicz Christopher J. Markiewicz Panos Panos python-versioneer-0.29/INSTALL.md000066400000000000000000000143771445202303300166040ustar00rootroot00000000000000# Installation First, decide on values for the following configuration variables: * `VCS`: the version control system you use. Currently accepts "git". * `style`: the style of version string to be produced. See [Styles](./README.md#styles) for details. Defaults to "pep440", which looks like `TAG[+DISTANCE.gSHORTHASH[.dirty]]`. * `versionfile_source`: A project-relative pathname into which the generated version strings should be written. This is usually a `_version.py` next to your project's main `__init__.py` file, so it can be imported at runtime. If your project uses `src/myproject/__init__.py`, this should be `src/myproject/_version.py`. This file should be checked in to your VCS as usual: the copy created below by `versioneer install` will include code that parses expanded VCS keywords in generated tarballs. The 'build' and 'sdist' commands will replace it with a copy that has just the calculated version string. This must be set even if your project does not have any modules (and will therefore never import `_version.py`), since "setup.py sdist" -based trees still need somewhere to record the pre-calculated version strings. Anywhere in the source tree should do. If there is a `__init__.py` next to your `_version.py`, the `versioneer install` command (described below) will append some `__version__`-setting assignments, if they aren't already present. * `versionfile_build`: Like `versionfile_source`, but relative to the build directory instead of the source directory. These will differ when your setup.py uses 'package_dir='. If you have `package_dir={'myproject': 'src/myproject'}`, then you will probably have `versionfile_build='myproject/_version.py'` and `versionfile_source='src/myproject/_version.py'`. If this is set to None, then `setup.py build` will not attempt to rewrite any `_version.py` in the built tree. 
If your project does not have any libraries (e.g. if it only builds a
  script), then you should use `versionfile_build = None`. To actually use
  the computed version string, your `setup.py` will need to override
  `distutils.command.build_scripts` with a subclass that explicitly inserts
  a copy of `versioneer.get_version()` into your script file. See
  `test/demoapp-script-only/setup.py` for an example.

* `tag_prefix`: a string, like 'PROJECTNAME-', which appears at the start of
  all VCS tags. If your tags look like 'myproject-1.2.0', then you should use
  tag_prefix='myproject-'. If you use unprefixed tags like '1.2.0', this
  should be an empty string, using either `tag_prefix=` or `tag_prefix=''`.

* `parentdir_prefix`: an optional string, frequently the same as tag_prefix,
  which appears at the start of all unpacked tarball filenames. If your
  tarball unpacks into 'myproject-1.2.0', this should be 'myproject-'. To
  disable this feature, just omit the field from your `setup.cfg`.

This tool provides one script, named `versioneer`. That script has two modes:

1) "install --vendor", which writes a copy of `versioneer.py` into the
   current directory and runs `versioneer.py setup` to finish the
   installation.

2) "install --no-vendor", which attempts to run versioneer without installing
   a copy into the current directory. This is an experimental mode intended
   to work with `pyproject.toml`.

## Installation instructions

### Common steps

To versioneer-enable your project:

* 1: Install versioneer with `pip install versioneer`

* 2: Modify your `pyproject.toml` or your `setup.cfg`, adding a section named
  `[tool.versioneer]` or `[versioneer]` (respectively) and populating it with
  the configuration values you decided earlier (note that the option names
  are not case-sensitive):

  ```toml
  [tool.versioneer]
  VCS = "git"
  style = "pep440"
  versionfile_source = "src/myproject/_version.py"
  versionfile_build = "myproject/_version.py"
  tag_prefix = ""
  parentdir_prefix = "myproject-"
  ```

  ```ini
  [versioneer]
  VCS = git
  style = pep440
  versionfile_source = src/myproject/_version.py
  versionfile_build = myproject/_version.py
  tag_prefix =
  parentdir_prefix = myproject-
  ```

* 3: If using the EXPERIMENTAL non-vendored mode, add `versioneer` to your
  `pyproject.toml`:

  ```toml
  [build-system]
  requires = ["setuptools", "versioneer[toml]==0.29"]
  build-backend = "setuptools.build_meta"
  ```

  It is recommended to pin the version of Versioneer you installed with. The
  `[toml]` extra is required if your configuration is also in
  `pyproject.toml`.

* 4: Run `versioneer install --vendor` OR `versioneer install --no-vendor`.
  This will do the following:

  * copy `versioneer.py` into the top of your source tree (vendored mode)
  * create `_version.py` in the right place (`versionfile_source`)
  * modify your `__init__.py` (if one exists next to `_version.py`) to define
    `__version__` (by calling a function from `_version.py`)

  `versioneer install` will complain about any problems it finds with your
  `setup.py` or `setup.cfg`. Run it multiple times until you have fixed all
  the problems.

* 5: add `import versioneer` to your setup.py, and add the following
  arguments to the setup() call:

      version=versioneer.get_version(),
      cmdclass=versioneer.get_cmdclass(),

  If your project uses a special `cmdclass`, pass that `cmdclass` as a
  parameter. For example:

      from numpy.distutils.core import numpy_cmdclass
      cmdclass=versioneer.get_cmdclass(numpy_cmdclass),

* 6: commit these changes to your VCS. 
To make sure you won't forget, `versioneer install` will mark everything it touched for addition using `git add`. Don't forget to add `setup.py` and `setup.cfg` too. ## Post-Installation Usage Once established, all uses of your tree from a VCS checkout should get the current version string. All generated tarballs should include an embedded version string (so users who unpack them will not need a VCS tool installed). If you distribute your project through PyPI, then the release process should boil down to two steps: * 1: git tag 1.0 * 2: python setup.py register sdist upload If you distribute it through github (i.e. users use github to generate tarballs with `git archive`), the process is: * 1: git tag 1.0 * 2: git push; git push --tags Versioneer will report "0+untagged.NUMCOMMITS.gHASH" until your tree has at least one tag in its history. python-versioneer-0.29/LICENSE000066400000000000000000000022741445202303300161520ustar00rootroot00000000000000This is free and unencumbered software released into the public domain. Anyone is free to copy, modify, publish, use, compile, sell, or distribute this software, either in source code form or as a compiled binary, for any purpose, commercial or non-commercial, and by any means. In jurisdictions that recognize copyright laws, the author or authors of this software dedicate any and all copyright interest in the software to the public domain. We make this dedication for the benefit of the public at large and to the detriment of our heirs and successors. We intend this dedication to be an overt act of relinquishment in perpetuity of all present and future rights to this software under copyright law. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. For more information, please refer to python-versioneer-0.29/MANIFEST.in000066400000000000000000000004501445202303300166750ustar00rootroot00000000000000# we need to include a lot, because this package isn't really made up of # packages include MANIFEST.in README.md NOTES developers.md details.md LICENSE tox.ini recursive-include src *.py recursive-include test *.py *.pyx *.c *.cfg *.toml README MANIFEST.in rundemo placeholder rundemo-template python-versioneer-0.29/NEWS.md000066400000000000000000000171321445202303300162420ustar00rootroot00000000000000## Release 0.29 (7-Jul-2023) This release allows for `pyproject.toml`-only build systems to allow versioneer to find the project root, despite an absense of a `setup.py`. Provides some error message support when the pyproject.toml is malformed. Finally we added basic type-hinting to the project, which should help users of type-checking systems ensure correct code when vendoring. With thanks to Dimitri Papadopoulos Orfanos, Mike Taves, '@gamecss' and '@GCS-ZHN' for contributions. * FIX: Add error output when `pyproject.toml` is malformed. by @GCS-ZHN (#361) * FIX: Add name to `setup.py` to work around a github dependency graph bug. by @mwtwoes. (#360) * ENH: Add basic type information throughout project. by @Callek (#365 and #367) * ENH: Detect `pyproject.toml` as project root (to support `PDM`). by @gamecss (#371) * MNT: Overwrite version file instead of delete/unlink. 
by @DimitriPapadopoulos (#353) * MNT: Use `https` for the unlicense url. by @DimitriPapadopoulos (#356) * MNT: Removal of CJM as maintainer. by @effigies (#359) * MNT: Prepare release 0.29. by @Callek (#373) * CI: Use 3.11 release (not rc). by @DimitriPapadopoulos (#355) ## Release 0.28 (27-Oct-2022) This release adds official support for Python 3.11, including using the built-in tomllib instead of the third-party tomli, when available. With thanks to Michał Górny for contributions. * FIX: Handle unset `versionfile_build` in `build_ext` by @mgorny (#347) * ENH: Support built-in tomllib for Python 3.11+ by @mgorny (#348) ## Release 0.27 (19-Oct-2022) This release fixes a bug with non-isolated builds of Versioneer and for packages that provide their own `sdist` command. With thanks to Dimitri Papadopoulos and Michał Górny for contributions. * FIX: Always bootstrap in setup.py to avoid incompatibility with old versioneer by @mgorny (#344) * FIX: Mixup between `_egg_info` and `_sdist` by @DimitriPapadopoulos (#342) * STY: Merge `endswith` checks by @DimitriPapadopoulos (#337) * STY: Useless inheritance from object by @DimitriPapadopoulos (#336) * CI: python-version should be a string, not a float by @DimitriPapadopoulos (#340) * CI: Automatically update GitHub Actions in the future by @DimitriPapadopoulos (#341) ## Release 0.26 (6-Sep-2022) This release adds support for configuring versioneer through pyproject.toml and removes itself from the list of explicit build requirements, which caused problems with `--no-binary` installations. * FIX: Remove versioneer from build-system.requires by @effigies (#334) * ENH: Support configuration in pyproject.toml by @effigies (#330) ## Release 0.25 (2-Sep-2022) This release makes minor changes to the metadata, ensures tests run correctly from sdist packages, and uses non-vendored versioneer to version itself. With thanks to Simão Afonso for contributions. * FIX: Include pyproject.toml in MANIFEST.in by @effigies (#326) * STY: Appease flake8 by @simaoafonso-pwt (#327) * MNT: Version versioneer with versioneer by @effigies (#323) * CI: Build package and push to PyPI on tag by @effigies (#328) * CI: Explicitly test sdist by @effigies (#329) ## Release 0.24 (30-Aug-2022) This release adds support for a non-vendored use of Versioneer. With thanks to Stefan Appelhoff and Yaroslav Halchenko for contributions. * MNT: Relicense to Unlicense by @effigies (#317) * ENH: in verbose mode do not hide stderr of git rev-parse by @yarikoptic (#318) * DOC: clarify upgrading to 0.23 doesn't require special actions by @sappelhoff (#321) * ENH: Prepare for py2exe dropping distutils support by @effigies (#319) * ENH: Allow versioneer to be used as a module in PEP-518 mode by @effigies (#294) ## Release 0.23 (12-Aug-2022) This release adds support for Setuptools' PEP-660 editable installations, drops support for Python 3.6, and supports startlingly old git versions. With thanks to Biastian Zim, Michał Górny, Igor S. Gerasimov, Christian Clauss, Anderson Bravalheri and Simão Afonso for contributions. 
* FIX: Adequate custom `build_py` command to changes in setuptools v64 by @abravalheri (#313) * FIX: skip version update on `build_ext` if .py does not exist by @mgorny (#297) * FIX: old GIT (<1.7.2) does not know about `--count` flag by @foxtran (#306) * FIX: Use only numeric versions in Git, ignore other tags with the same prefix by @effigies (#256) * FIX: Handle missing `tag_prefix` gracefully by @effigies (#308) * FIX: Restore `py_modules` field to setup.py by @effigies (#293) * ENH: Patch versioneer files into manifest at runtime by @effigies (#309) * STY: Undefined name: VersioneerBadRootError on line 51 by @cclauss (#305) * STY: Appease flake8 by @simaoafonso-pwt (#312) * MNT: Drop 3.6 support, remove old hacks by @effigies (#288) * MNT: Clarify license as CC0-1.0 by @BastianZim (#292) * MNT: Drop distutils by @effigies (#289) * MNT: Disable editable installs of versioneer (they will not work) by @effigies (#307) * CI: Update gh-actions PLATFORM variable to avoid double-testing by @effigies (#311) ## Release 0.22 (07-Mar-2022) This release fixes failures in Windows related to different handling of asterisk characters depending on the shell and the presence of a prefix. This release explicitly has been tested on Python 3.10 and is the final release that will support Python 3.6 or distutils. With thanks to John Wodder, Mathijs van der Vlies and Christian Schulze for their contributions. * FIX: Unset `GIT_DIR` environment variable while retrieving version information from git (#280) * FIX: Hide console window if pythonw.exe is used (#285) * FIX: Broken tag prefix on Windows and add CI (#283) * FIX: Default to setuptools, only falling back to distutils (#276) * TEST: Verify and note Python 3.10 support (#272) * MNT: Run CI weekly to catch upstream deprecations quickly (#281) ## Release 0.21 (13-Oct-2021) With thanks to Dimitri Papadopoulos Orfanos, Andrew Tolmie, Michael Niklas, Mike Taves, Ryan Mast, and Yaroslav Halchenko for contributions. * FIX: Escape asterisk in `git describe` call on Windows (#262) * ENH: Add some type annotations to play nicely with mypy (#269) * ENH: Respect tags with `.postN` in pep440-pre style (#261) * TEST: Subproject installations fixed in Pip 21.3, remove expected failure marks (#271) * STY: Fix typos (#260 and #266) * STY: Centralize pylint hints in header (#270) * MNT: Use `os.path` and `pathlib` consistently (#267) ## Release 0.20 (13-Jul-2021) With thanks to Tanvi Moharir, Ashutosh Varma, Benjamin Rüth, Lucas Jansen, Timothy Lusk and Barret O'Brock for contributions. * Respect `versionfile_source` in `__init__.py` snippet (#241) * Add `pep440-branch` and `pep440-post-branch` styles (#164) * Stop testing deprecated `easy_install`, support left in for now (#237) * Use `versionfile_build` instead of `versionfile_source` where needed (#242) * Improve handling of refname edge cases (#229) * Clarify installation in docs (#235) * Play nicely with custom `build_ext`s (#232) ## Release 0.19 (10-Nov-2020) Versioneer's 0.19 release is the first under new maintainership, and most of the work done has been maintenance along with a few features and bug fixes. No significant changes to the mode of operation have been included here. The current maintainers are Nathan Buckner, Yaroslav Halchenko, Chris Markiewicz, Kevin Sheppard and Brian Warner. 
* Drop support for Python < 3.6, test up to Python 3.9 * Strip GPG signature information from date (#222) * Add `bdist_ext` cmdclass, to support native code extensions (#171) * Canonicalize pep440-pre style (#163) * Take arguments to `get_cmdclass` ## Release 0.18 (01-Jan-2017) * switch to entrypoints to get the right executable suffix on windows (#131) * update whitespace to appease latest flake8 Thanks to xoviat for the windows patch. python-versioneer-0.29/NOTES000066400000000000000000000072661445202303300157660ustar00rootroot00000000000000# -*- org -*- * github tarballs ** when downloaded from the "tags" page, look like: warner-python-ed25519-0.4-0-gdeb78a0.tar.gz ** and unpack into: warner-python-ed25519-deb78a0/ ** That's a bummer, I was hoping they'd be like "python-ed25519-0.4.tar.gz" and unpack into python-ed25519-0.4/ . * git attributes --help: add .gitattributes, with "_version.py export-subst" then put $Format:XXX$ in the file, with XXX from git-log's --pretty=format: $Format:%H$ -> commit hash %h -> abbreviated commit hash %d -> ref names, like git log --decorate also "filename export-ignore": won't be added to an archive might be possible to get github's 'git archive' to put the tag name into a _version.py file * first attempt at a plan: ** in the source tree - commit a _version.py with a s="$Format:%d$" and some massaging code - need to parse, strip boring refs, return highest non-boring one ** when using archive from github - __init__.py pulls massaged string from _version.py ** when building from checkout for testing - 'setup.py build' does git-describe and replaces _version.py with a one-line variable set - trouble: modifying a committed file, VCS will complain - avoid by subclassing 'build', only modify build/lib../../_version.py ** when building sdist from checkout - 'setup.py sdist' does git-describe and replaces _version.py - less trouble with modifying a committed file, if we always do sdist from a clean checkout (which is probably a good idea anyways) ** when running from source from checkout - ?? need to do git-describe from _version.py, eww ** problems: *** modifying _version.py during 'setup.py build' (for testing), leaves tree in modified state, and you don't want to accidentally replace that _version.py *** really we want to modify a file in build/, not in the source tree - might be possible for 'setup.py build', since we can get control after the superclass method has run - 'setup.py build' uses distutils.command.build_py build_py.build_module to copy things into the build directory, can use to figure out the _version.py to modify. Check for hardlinks. Hm, looks like build doesn't use hardlinks, although sdist does. - probably not so possible for 'setup.py sdist'. But maybe that's ok. - actually, cmd.sub_commands=[] is a list of (name,runp) that are invoked after self.filelist is created (but not populated), could be used to invoke extra commands - self.make_release_tree() is what creates the tree that will be archived.. could override that, modify _version.py on the way out - good to know: make_release_tree() creates hardlinks if possible. If 'setup.py build' does this too, that would explain why it's not always necessary to rebuild when source files are changed. 
- need to ensure _version.py is not a link before changing it - make_release_tree(base_dir, files) creates os.path.join(base_dir,f) for f in files **** let's investigate this: - 'setup.py build' runs superclass, looks for .git, if present: - use git-describe - locate build/STUFF/../_version.py - replace it with one-line string variable set - if no .git, assume the version number is already good, leave it alone - 'setup.py sdist' looks for .git first, modifies _version.py in source tree, then runs superclass *** could modify a different file, one which is in .gitignore - _version.py will need to import that *** setup.py build * current problems ** DONE running 'setup.py build' from a git-archive tree - need version_from_expanded_variable() in versioneer.py too, not just _version.py python-versioneer-0.29/README.md000066400000000000000000000327431445202303300164300ustar00rootroot00000000000000The Versioneer ============== * like a rocketeer, but for versions! * https://github.com/python-versioneer/python-versioneer * Brian Warner * License: Public Domain (Unlicense) * Compatible with: Python 3.7, 3.8, 3.9, 3.10, 3.11 and pypy3 * [![Latest Version][pypi-image]][pypi-url] * [![Build Status][travis-image]][travis-url] This is a tool for managing a recorded version number in setuptools-based python projects. The goal is to remove the tedious and error-prone "update the embedded version string" step from your release process. Making a new release should be as easy as recording a new tag in your version-control system, and maybe making new tarballs. ## Quick Install Versioneer provides two installation modes. The "classic" vendored mode installs a copy of versioneer into your repository. The experimental build-time dependency mode is intended to allow you to skip this step and simplify the process of upgrading. 
### Vendored mode * `pip install versioneer` to somewhere in your $PATH * A [conda-forge recipe](https://github.com/conda-forge/versioneer-feedstock) is available, so you can also use `conda install -c conda-forge versioneer` * add a `[tool.versioneer]` section to your `pyproject.toml` or a `[versioneer]` section to your `setup.cfg` (see [Install](INSTALL.md)) * Note that you will need to add `tomli; python_version < "3.11"` to your build-time dependencies if you use `pyproject.toml` * run `versioneer install --vendor` in your source tree, commit the results * verify version information with `python setup.py version` ### Build-time dependency mode * `pip install versioneer` to somewhere in your $PATH * A [conda-forge recipe](https://github.com/conda-forge/versioneer-feedstock) is available, so you can also use `conda install -c conda-forge versioneer` * add a `[tool.versioneer]` section to your `pyproject.toml` or a `[versioneer]` section to your `setup.cfg` (see [Install](INSTALL.md)) * add `versioneer` (with `[toml]` extra, if configuring in `pyproject.toml`) to the `requires` key of the `build-system` table in `pyproject.toml`: ```toml [build-system] requires = ["setuptools", "versioneer[toml]"] build-backend = "setuptools.build_meta" ``` * run `versioneer install --no-vendor` in your source tree, commit the results * verify version information with `python setup.py version` ## Version Identifiers Source trees come from a variety of places: * a version-control system checkout (mostly used by developers) * a nightly tarball, produced by build automation * a snapshot tarball, produced by a web-based VCS browser, like github's "tarball from tag" feature * a release tarball, produced by "setup.py sdist", distributed through PyPI Within each source tree, the version identifier (either a string or a number, this tool is format-agnostic) can come from a variety of places: * ask the VCS tool itself, e.g. "git describe" (for checkouts), which knows about recent "tags" and an absolute revision-id * the name of the directory into which the tarball was unpacked * an expanded VCS keyword ($Id$, etc) * a `_version.py` created by some earlier build step For released software, the version identifier is closely related to a VCS tag. Some projects use tag names that include more than just the version string (e.g. "myproject-1.2" instead of just "1.2"), in which case the tool needs to strip the tag prefix to extract the version identifier. For unreleased software (between tags), the version identifier should provide enough information to help developers recreate the same tree, while also giving them an idea of roughly how old the tree is (after version 1.2, before version 1.3). Many VCS systems can report a description that captures this, for example `git describe --tags --dirty --always` reports things like "0.7-1-g574ab98-dirty" to indicate that the checkout is one revision past the 0.7 tag, has a unique revision id of "574ab98", and is "dirty" (it has uncommitted changes). The version identifier is used for multiple purposes: * to allow the module to self-identify its version: `myproject.__version__` * to choose a name and prefix for a 'setup.py sdist' tarball ## Theory of Operation Versioneer works by adding a special `_version.py` file into your source tree, where your `__init__.py` can import it. This `_version.py` knows how to dynamically ask the VCS tool for version information at import time. 
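
The heart of that dynamic lookup is a `git describe` call. The following is a
minimal sketch of the idea only, assuming a plain `git` on `$PATH`; the real
`_version.py` (reproduced later in this archive) adds the `--match` filter for
`tag_prefix`, expanded-keyword and parent-directory fallbacks, and several
rendering styles:

```python
# Minimal sketch (not the real implementation) of the lookup performed by the
# generated _version.py: shell out to git and return the describe string.
import subprocess

def git_describe(root: str = ".") -> str:
    """Return e.g. '0.7-1-g574ab98-dirty': one commit past tag 0.7, dirty tree."""
    out = subprocess.run(
        ["git", "describe", "--tags", "--dirty", "--always", "--long"],
        cwd=root, capture_output=True, text=True, check=True,
    )
    return out.stdout.strip()
```
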
`_version.py` also contains `$Revision$` markers, and the installation process marks `_version.py` to have this marker rewritten with a tag name during the `git archive` command. As a result, generated tarballs will contain enough information to get the proper version. To allow `setup.py` to compute a version too, a `versioneer.py` is added to the top level of your source tree, next to `setup.py` and the `setup.cfg` that configures it. This overrides several distutils/setuptools commands to compute the version when invoked, and changes `setup.py build` and `setup.py sdist` to replace `_version.py` with a small static file that contains just the generated version data. ## Installation See [INSTALL.md](./INSTALL.md) for detailed installation instructions. ## Version-String Flavors Code which uses Versioneer can learn about its version string at runtime by importing `_version` from your main `__init__.py` file and running the `get_versions()` function. From the "outside" (e.g. in `setup.py`), you can import the top-level `versioneer.py` and run `get_versions()`. Both functions return a dictionary with different flavors of version information: * `['version']`: A condensed version string, rendered using the selected style. This is the most commonly used value for the project's version string. The default "pep440" style yields strings like `0.11`, `0.11+2.g1076c97`, or `0.11+2.g1076c97.dirty`. See the "Styles" section below for alternative styles. * `['full-revisionid']`: detailed revision identifier. For Git, this is the full SHA1 commit id, e.g. "1076c978a8d3cfc70f408fe5974aa6c092c949ac". * `['date']`: Date and time of the latest `HEAD` commit. For Git, it is the commit date in ISO 8601 format. This will be None if the date is not available. * `['dirty']`: a boolean, True if the tree has uncommitted changes. Note that this is only accurate if run in a VCS checkout, otherwise it is likely to be False or None * `['error']`: if the version string could not be computed, this will be set to a string describing the problem, otherwise it will be None. It may be useful to throw an exception in setup.py if this is set, to avoid e.g. creating tarballs with a version string of "unknown". Some variants are more useful than others. Including `full-revisionid` in a bug report should allow developers to reconstruct the exact code being tested (or indicate the presence of local changes that should be shared with the developers). `version` is suitable for display in an "about" box or a CLI `--version` output: it can be easily compared against release notes and lists of bugs fixed in various releases. The installer adds the following text to your `__init__.py` to place a basic version in `YOURPROJECT.__version__`: from ._version import get_versions __version__ = get_versions()['version'] del get_versions ## Styles The setup.cfg `style=` configuration controls how the VCS information is rendered into a version string. The default style, "pep440", produces a PEP440-compliant string, equal to the un-prefixed tag name for actual releases, and containing an additional "local version" section with more detail for in-between builds. For Git, this is TAG[+DISTANCE.gHEX[.dirty]] , using information from `git describe --tags --dirty --always`. For example "0.11+2.g1076c97.dirty" indicates that the tree is like the "1076c97" commit but has uncommitted changes (".dirty"), and that this commit is two revisions ("+2") beyond the "0.11" tag. 
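
As a rough illustration, the sketch below mirrors the shape of the
`render_pep440()` function in the generated `_version.py`; it is simplified
(the real function also switches the `+` separator to `.` when the tag itself
already contains a local-version segment):

```python
# Simplified sketch of the default "pep440" rendering: TAG[+DISTANCE.gHEX[.dirty]].
def render_pep440_sketch(tag, distance, short_hash, dirty):
    if tag:
        rendered = tag
        if distance or dirty:
            rendered += "+%d.g%s" % (distance, short_hash)
            if dirty:
                rendered += ".dirty"
    else:
        # no tag in history: fall back to 0+untagged.DISTANCE.gHEX[.dirty]
        rendered = "0+untagged.%d.g%s" % (distance, short_hash)
        if dirty:
            rendered += ".dirty"
    return rendered

assert render_pep440_sketch("0.11", 2, "1076c97", True) == "0.11+2.g1076c97.dirty"
assert render_pep440_sketch("0.11", 0, "1076c97", False) == "0.11"
```
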
For released software (exactly equal to a known tag), the identifier will
only contain the stripped tag, e.g. "0.11".

Other styles are available. See [details.md](details.md) in the Versioneer
source tree for descriptions.

## Debugging

Versioneer tries to avoid fatal errors: if something goes wrong, it will tend
to return a version of "0+unknown". To investigate the problem, run
`setup.py version`, which will run the version-lookup code in a verbose mode,
and will display the full contents of `get_versions()` (including the `error`
string, which may help identify what went wrong).

## Known Limitations

Some situations are known to cause problems for Versioneer. This section
details the most significant ones. More can be found on the GitHub
[issues page](https://github.com/python-versioneer/python-versioneer/issues).

### Subprojects

Versioneer has limited support for source trees in which `setup.py` is not in
the root directory (e.g. `setup.py` and `.git/` are *not* siblings). There
are two common reasons why `setup.py` might not be in the root:

* Source trees which contain multiple subprojects, such as
  [Buildbot](https://github.com/buildbot/buildbot), which contains both
  "master" and "slave" subprojects, each with their own `setup.py`,
  `setup.cfg`, and `tox.ini`. Projects like these produce multiple PyPI
  distributions (and upload multiple independently-installable tarballs).
* Source trees whose main purpose is to contain a C library, but which also
  provide bindings to Python (and perhaps other languages) in subdirectories.

Versioneer will look for `.git` in parent directories, and most operations
should get the right version string. However `pip` and `setuptools` have bugs
and implementation details which frequently cause `pip install .` from a
subproject directory to fail to find a correct version string (so it usually
defaults to `0+unknown`).

`pip install --editable .` should work correctly. `setup.py install` might
work too.

Pip-8.1.1 is known to have this problem, but hopefully it will get fixed in
some later version.

[Bug #38](https://github.com/python-versioneer/python-versioneer/issues/38)
is tracking this issue. The discussion in
[PR #61](https://github.com/python-versioneer/python-versioneer/pull/61)
describes the issue from the Versioneer side in more detail.
[pip PR#3176](https://github.com/pypa/pip/pull/3176) and
[pip PR#3615](https://github.com/pypa/pip/pull/3615) contain work to improve
pip to let Versioneer work correctly.

Versioneer-0.16 and earlier only looked for a `.git` directory next to the
`setup.cfg`, so subprojects were completely unsupported with those releases.

### Editable installs with setuptools <= 18.5

`setup.py develop` and `pip install --editable .` allow you to install a
project into a virtualenv once, then continue editing the source code (and
test) without re-installing after every change. "Entry-point scripts"
(`setup(entry_points={"console_scripts": ..})`) are a convenient way to
specify executable scripts that should be installed along with the python
package.

These both work as expected when using modern setuptools. When using
setuptools-18.5 or earlier, however, certain operations will cause
`pkg_resources.DistributionNotFound` errors when running the entrypoint
script, which must be resolved by re-installing the package. This happens
when the install happens with one version, then the egg_info data is
regenerated while a different version is checked out. 
Many setup.py commands cause egg_info to be rebuilt (including `sdist`, `wheel`, and installing into a different virtualenv), so this can be surprising. [Bug #83](https://github.com/python-versioneer/python-versioneer/issues/83) describes this one, but upgrading to a newer version of setuptools should probably resolve it. ## Updating Versioneer To upgrade your project to a new release of Versioneer, do the following: * install the new Versioneer (`pip install -U versioneer` or equivalent) * edit `setup.cfg` and `pyproject.toml`, if necessary, to include any new configuration settings indicated by the release notes. See [UPGRADING](./UPGRADING.md) for details. * re-run `versioneer install --[no-]vendor` in your source tree, to replace `SRC/_version.py` * commit any changed files ## Future Directions This tool is designed to make it easily extended to other version-control systems: all VCS-specific components are in separate directories like src/git/ . The top-level `versioneer.py` script is assembled from these components by running make-versioneer.py . In the future, make-versioneer.py will take a VCS name as an argument, and will construct a version of `versioneer.py` that is specific to the given VCS. It might also take the configuration arguments that are currently provided manually during installation by editing setup.py . Alternatively, it might go the other direction and include code from all supported VCS systems, reducing the number of intermediate scripts. ## Similar projects * [setuptools_scm](https://github.com/pypa/setuptools_scm/) - a non-vendored build-time dependency * [minver](https://github.com/jbweston/miniver) - a lightweight reimplementation of versioneer * [versioningit](https://github.com/jwodder/versioningit) - a PEP 518-based setuptools plugin ## License To make Versioneer easier to embed, all its code is dedicated to the public domain. The `_version.py` that it creates is also in the public domain. Specifically, both are released under the "Unlicense", as described in https://unlicense.org/. [pypi-image]: https://img.shields.io/pypi/v/versioneer.svg [pypi-url]: https://pypi.python.org/pypi/versioneer/ [travis-image]: https://img.shields.io/travis/com/python-versioneer/python-versioneer.svg [travis-url]: https://travis-ci.com/github/python-versioneer/python-versioneer python-versioneer-0.29/UPGRADING.md000066400000000000000000000044071445202303300170070ustar00rootroot00000000000000# Upgrading Versioneer In Your Project Some releases of Versioneer change the way your setup.py and setup.cfg must be configured. This document contains a list of releases where, when upgrading from an older release, you must make changes in your project. ## Upgrading to 0.27 through 0.29 Nothing special. ## Upgrading to 0.26 Versioneer now supports configuration through `pyproject.toml` using a `[tool.versioneer]` table. To use this feature, you will need to add the `[toml]` extra to `build-system.requires`, e.g., ```toml [build-system] requires = ["setuptools", "versioneer[toml]==0.26"] ``` ## Upgrading to 0.24 If you are transitioning to a non-vendored mode for versioneer, add `"versioneer"` to your `pyproject.toml`. You now update the version file by using `versioneer install --no-vendor` instead of `python versioneer.py setup`. Consider using `"versioneer @ 0.24"` to ensure future breaking changes don't affect your build. For vendored installations, nothing changes. ## Upgrading to 0.16 through 0.23 Nothing special. 
## Upgrading to 0.15 Starting with this version, Versioneer is configured with a `[versioneer]` section in your `setup.cfg` file. Earlier versions required the `setup.py` to set attributes on the `versioneer` module immediately after import. The new version will refuse to run (raising an exception during import) until you have provided the necessary `setup.cfg` section. In addition, the Versioneer package provides an executable named `versioneer`, and the installation process is driven by running `versioneer install`. In 0.14 and earlier, the executable was named `versioneer-installer` and was run without an argument. ## Upgrading to 0.14 0.14 changes the format of the version string. 0.13 and earlier used hyphen-separated strings like "0.11-2-g1076c97-dirty". 0.14 and beyond use a plus-separated "local version" section strings, with dot-separated components, like "0.11+2.g1076c97". PEP440-strict tools did not like the old format, but should be ok with the new one. ## Upgrading from 0.11 to 0.12 Nothing special. ## Upgrading from 0.10 to 0.11 You must add a `versioneer.VCS = "git"` to your `setup.py` before re-running `setup.py setup_versioneer`. This will enable the use of additional version-control systems (SVN, etc) in the future. python-versioneer-0.29/_version.py000066400000000000000000000577201445202303300173510ustar00rootroot00000000000000 # This file helps to compute a version number in source trees obtained from # git-archive tarball (such as those provided by githubs download-from-tag # feature). Distribution tarballs (built by setup.py sdist) and build # directories (produced by setup.py build) will contain a much shorter file # that just contains the computed version number. # This file is released into the public domain. # Generated by versioneer-0.29 # https://github.com/python-versioneer/python-versioneer """Git implementation of _version.py.""" import errno import os import re import subprocess import sys from typing import Any, Callable, Dict, List, Optional, Tuple import functools def get_keywords() -> Dict[str, str]: """Get the keywords needed to look up the version information.""" # these strings will be replaced by git during git-archive. # setup.py/versioneer.py will grep for the variable names, so they must # each be defined on a line of their own. _version.py will just call # get_keywords(). 
git_refnames = " (tag: 0.29)" git_full = "28c613dbef5fce09dc3ba6b1baa811c2d76b2245" git_date = "2023-07-07 10:50:03 -0400" keywords = {"refnames": git_refnames, "full": git_full, "date": git_date} return keywords class VersioneerConfig: """Container for Versioneer configuration parameters.""" VCS: str style: str tag_prefix: str parentdir_prefix: str versionfile_source: str verbose: bool def get_config() -> VersioneerConfig: """Create, populate and return the VersioneerConfig() object.""" # these strings are filled in when 'setup.py versioneer' creates # _version.py cfg = VersioneerConfig() cfg.VCS = "git" cfg.style = "pep440" cfg.tag_prefix = "" cfg.parentdir_prefix = "" cfg.versionfile_source = "_version.py" cfg.verbose = False return cfg class NotThisMethod(Exception): """Exception raised if a method is not valid for the current scenario.""" LONG_VERSION_PY: Dict[str, str] = {} HANDLERS: Dict[str, Dict[str, Callable]] = {} def register_vcs_handler(vcs: str, method: str) -> Callable: # decorator """Create decorator to mark a method as the handler of a VCS.""" def decorate(f: Callable) -> Callable: """Store f in HANDLERS[vcs][method].""" if vcs not in HANDLERS: HANDLERS[vcs] = {} HANDLERS[vcs][method] = f return f return decorate def run_command( commands: List[str], args: List[str], cwd: Optional[str] = None, verbose: bool = False, hide_stderr: bool = False, env: Optional[Dict[str, str]] = None, ) -> Tuple[Optional[str], Optional[int]]: """Call the given command(s).""" assert isinstance(commands, list) process = None popen_kwargs: Dict[str, Any] = {} if sys.platform == "win32": # This hides the console window if pythonw.exe is used startupinfo = subprocess.STARTUPINFO() startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW popen_kwargs["startupinfo"] = startupinfo for command in commands: try: dispcmd = str([command] + args) # remember shell=False, so use git.cmd on windows, not just git process = subprocess.Popen([command] + args, cwd=cwd, env=env, stdout=subprocess.PIPE, stderr=(subprocess.PIPE if hide_stderr else None), **popen_kwargs) break except OSError as e: if e.errno == errno.ENOENT: continue if verbose: print("unable to run %s" % dispcmd) print(e) return None, None else: if verbose: print("unable to find command, tried %s" % (commands,)) return None, None stdout = process.communicate()[0].strip().decode() if process.returncode != 0: if verbose: print("unable to run %s (error)" % dispcmd) print("stdout was %s" % stdout) return None, process.returncode return stdout, process.returncode def versions_from_parentdir( parentdir_prefix: str, root: str, verbose: bool, ) -> Dict[str, Any]: """Try to determine the version from the parent directory name. Source tarballs conventionally unpack into a directory that includes both the project name and a version string. 
We will also support searching up two directory levels for an appropriately named parent directory """ rootdirs = [] for _ in range(3): dirname = os.path.basename(root) if dirname.startswith(parentdir_prefix): return {"version": dirname[len(parentdir_prefix):], "full-revisionid": None, "dirty": False, "error": None, "date": None} rootdirs.append(root) root = os.path.dirname(root) # up a level if verbose: print("Tried directories %s but none started with prefix %s" % (str(rootdirs), parentdir_prefix)) raise NotThisMethod("rootdir doesn't start with parentdir_prefix") @register_vcs_handler("git", "get_keywords") def git_get_keywords(versionfile_abs: str) -> Dict[str, str]: """Extract version information from the given file.""" # the code embedded in _version.py can just fetch the value of these # keywords. When used from setup.py, we don't want to import _version.py, # so we do it with a regexp instead. This function is not used from # _version.py. keywords: Dict[str, str] = {} try: with open(versionfile_abs, "r") as fobj: for line in fobj: if line.strip().startswith("git_refnames ="): mo = re.search(r'=\s*"(.*)"', line) if mo: keywords["refnames"] = mo.group(1) if line.strip().startswith("git_full ="): mo = re.search(r'=\s*"(.*)"', line) if mo: keywords["full"] = mo.group(1) if line.strip().startswith("git_date ="): mo = re.search(r'=\s*"(.*)"', line) if mo: keywords["date"] = mo.group(1) except OSError: pass return keywords @register_vcs_handler("git", "keywords") def git_versions_from_keywords( keywords: Dict[str, str], tag_prefix: str, verbose: bool, ) -> Dict[str, Any]: """Get version information from git keywords.""" if "refnames" not in keywords: raise NotThisMethod("Short version file found") date = keywords.get("date") if date is not None: # Use only the last line. Previous lines may contain GPG signature # information. date = date.splitlines()[-1] # git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant # datestamp. However we prefer "%ci" (which expands to an "ISO-8601 # -like" string, which we must then edit to make compliant), because # it's been around since git-1.5.3, and it's too difficult to # discover which version we're using, or to work around using an # older one. date = date.strip().replace(" ", "T", 1).replace(" ", "", 1) refnames = keywords["refnames"].strip() if refnames.startswith("$Format"): if verbose: print("keywords are unexpanded, not using") raise NotThisMethod("unexpanded keywords, not a git-archive tarball") refs = {r.strip() for r in refnames.strip("()").split(",")} # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of # just "foo-1.0". If we see a "tag: " prefix, prefer those. TAG = "tag: " tags = {r[len(TAG):] for r in refs if r.startswith(TAG)} if not tags: # Either we're using git < 1.8.3, or there really are no tags. We use # a heuristic: assume all version tags have a digit. The old git %d # expansion behaves like git log --decorate=short and strips out the # refs/heads/ and refs/tags/ prefixes that would let us distinguish # between branches and tags. By ignoring refnames without digits, we # filter out many common branch names like "release" and # "stabilization", as well as "HEAD" and "master". tags = {r for r in refs if re.search(r'\d', r)} if verbose: print("discarding '%s', no digits" % ",".join(refs - tags)) if verbose: print("likely tags: %s" % ",".join(sorted(tags))) for ref in sorted(tags): # sorting will prefer e.g. 
"2.0" over "2.0rc1" if ref.startswith(tag_prefix): r = ref[len(tag_prefix):] # Filter out refs that exactly match prefix or that don't start # with a number once the prefix is stripped (mostly a concern # when prefix is '') if not re.match(r'\d', r): continue if verbose: print("picking %s" % r) return {"version": r, "full-revisionid": keywords["full"].strip(), "dirty": False, "error": None, "date": date} # no suitable tags, so version is "0+unknown", but full hex is still there if verbose: print("no suitable tags, using unknown + full revision id") return {"version": "0+unknown", "full-revisionid": keywords["full"].strip(), "dirty": False, "error": "no suitable tags", "date": None} @register_vcs_handler("git", "pieces_from_vcs") def git_pieces_from_vcs( tag_prefix: str, root: str, verbose: bool, runner: Callable = run_command ) -> Dict[str, Any]: """Get version from 'git describe' in the root of the source tree. This only gets called if the git-archive 'subst' keywords were *not* expanded, and _version.py hasn't already been rewritten with a short version string, meaning we're inside a checked out source tree. """ GITS = ["git"] if sys.platform == "win32": GITS = ["git.cmd", "git.exe"] # GIT_DIR can interfere with correct operation of Versioneer. # It may be intended to be passed to the Versioneer-versioned project, # but that should not change where we get our version from. env = os.environ.copy() env.pop("GIT_DIR", None) runner = functools.partial(runner, env=env) _, rc = runner(GITS, ["rev-parse", "--git-dir"], cwd=root, hide_stderr=not verbose) if rc != 0: if verbose: print("Directory %s not under git control" % root) raise NotThisMethod("'git rev-parse --git-dir' returned error") # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty] # if there isn't one, this yields HEX[-dirty] (no NUM) describe_out, rc = runner(GITS, [ "describe", "--tags", "--dirty", "--always", "--long", "--match", f"{tag_prefix}[[:digit:]]*" ], cwd=root) # --long was added in git-1.5.5 if describe_out is None: raise NotThisMethod("'git describe' failed") describe_out = describe_out.strip() full_out, rc = runner(GITS, ["rev-parse", "HEAD"], cwd=root) if full_out is None: raise NotThisMethod("'git rev-parse' failed") full_out = full_out.strip() pieces: Dict[str, Any] = {} pieces["long"] = full_out pieces["short"] = full_out[:7] # maybe improved later pieces["error"] = None branch_name, rc = runner(GITS, ["rev-parse", "--abbrev-ref", "HEAD"], cwd=root) # --abbrev-ref was added in git-1.6.3 if rc != 0 or branch_name is None: raise NotThisMethod("'git rev-parse --abbrev-ref' returned error") branch_name = branch_name.strip() if branch_name == "HEAD": # If we aren't exactly on a branch, pick a branch which represents # the current commit. If all else fails, we are on a branchless # commit. branches, rc = runner(GITS, ["branch", "--contains"], cwd=root) # --contains was added in git-1.5.4 if rc != 0 or branches is None: raise NotThisMethod("'git branch --contains' returned error") branches = branches.split("\n") # Remove the first line if we're running detached if "(" in branches[0]: branches.pop(0) # Strip off the leading "* " from the list of branches. branches = [branch[2:] for branch in branches] if "master" in branches: branch_name = "master" elif not branches: branch_name = None else: # Pick the first branch that is returned. Good or bad. branch_name = branches[0] pieces["branch"] = branch_name # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] # TAG might have hyphens. 
git_describe = describe_out # look for -dirty suffix dirty = git_describe.endswith("-dirty") pieces["dirty"] = dirty if dirty: git_describe = git_describe[:git_describe.rindex("-dirty")] # now we have TAG-NUM-gHEX or HEX if "-" in git_describe: # TAG-NUM-gHEX mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe) if not mo: # unparsable. Maybe git-describe is misbehaving? pieces["error"] = ("unable to parse git-describe output: '%s'" % describe_out) return pieces # tag full_tag = mo.group(1) if not full_tag.startswith(tag_prefix): if verbose: fmt = "tag '%s' doesn't start with prefix '%s'" print(fmt % (full_tag, tag_prefix)) pieces["error"] = ("tag '%s' doesn't start with prefix '%s'" % (full_tag, tag_prefix)) return pieces pieces["closest-tag"] = full_tag[len(tag_prefix):] # distance: number of commits since tag pieces["distance"] = int(mo.group(2)) # commit: short hex revision ID pieces["short"] = mo.group(3) else: # HEX: no tags pieces["closest-tag"] = None out, rc = runner(GITS, ["rev-list", "HEAD", "--left-right"], cwd=root) pieces["distance"] = len(out.split()) # total number of commits # commit date: see ISO-8601 comment in git_versions_from_keywords() date = runner(GITS, ["show", "-s", "--format=%ci", "HEAD"], cwd=root)[0].strip() # Use only the last line. Previous lines may contain GPG signature # information. date = date.splitlines()[-1] pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1) return pieces def plus_or_dot(pieces: Dict[str, Any]) -> str: """Return a + if we don't already have one, else return a .""" if "+" in pieces.get("closest-tag", ""): return "." return "+" def render_pep440(pieces: Dict[str, Any]) -> str: """Build up version string, with post-release "local version identifier". Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty Exceptions: 1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty] """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"] or pieces["dirty"]: rendered += plus_or_dot(pieces) rendered += "%d.g%s" % (pieces["distance"], pieces["short"]) if pieces["dirty"]: rendered += ".dirty" else: # exception #1 rendered = "0+untagged.%d.g%s" % (pieces["distance"], pieces["short"]) if pieces["dirty"]: rendered += ".dirty" return rendered def render_pep440_branch(pieces: Dict[str, Any]) -> str: """TAG[[.dev0]+DISTANCE.gHEX[.dirty]] . The ".dev0" means not master branch. Note that .dev0 sorts backwards (a feature branch will appear "older" than the master branch). Exceptions: 1: no tags. 0[.dev0]+untagged.DISTANCE.gHEX[.dirty] """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"] or pieces["dirty"]: if pieces["branch"] != "master": rendered += ".dev0" rendered += plus_or_dot(pieces) rendered += "%d.g%s" % (pieces["distance"], pieces["short"]) if pieces["dirty"]: rendered += ".dirty" else: # exception #1 rendered = "0" if pieces["branch"] != "master": rendered += ".dev0" rendered += "+untagged.%d.g%s" % (pieces["distance"], pieces["short"]) if pieces["dirty"]: rendered += ".dirty" return rendered def pep440_split_post(ver: str) -> Tuple[str, Optional[int]]: """Split pep440 version string at the post-release segment. Returns the release segments before the post-release and the post-release version number (or -1 if no post-release segment is present). 
""" vc = str.split(ver, ".post") return vc[0], int(vc[1] or 0) if len(vc) == 2 else None def render_pep440_pre(pieces: Dict[str, Any]) -> str: """TAG[.postN.devDISTANCE] -- No -dirty. Exceptions: 1: no tags. 0.post0.devDISTANCE """ if pieces["closest-tag"]: if pieces["distance"]: # update the post release segment tag_version, post_version = pep440_split_post(pieces["closest-tag"]) rendered = tag_version if post_version is not None: rendered += ".post%d.dev%d" % (post_version + 1, pieces["distance"]) else: rendered += ".post0.dev%d" % (pieces["distance"]) else: # no commits, use the tag as the version rendered = pieces["closest-tag"] else: # exception #1 rendered = "0.post0.dev%d" % pieces["distance"] return rendered def render_pep440_post(pieces: Dict[str, Any]) -> str: """TAG[.postDISTANCE[.dev0]+gHEX] . The ".dev0" means dirty. Note that .dev0 sorts backwards (a dirty tree will appear "older" than the corresponding clean one), but you shouldn't be releasing software with -dirty anyways. Exceptions: 1: no tags. 0.postDISTANCE[.dev0] """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"] or pieces["dirty"]: rendered += ".post%d" % pieces["distance"] if pieces["dirty"]: rendered += ".dev0" rendered += plus_or_dot(pieces) rendered += "g%s" % pieces["short"] else: # exception #1 rendered = "0.post%d" % pieces["distance"] if pieces["dirty"]: rendered += ".dev0" rendered += "+g%s" % pieces["short"] return rendered def render_pep440_post_branch(pieces: Dict[str, Any]) -> str: """TAG[.postDISTANCE[.dev0]+gHEX[.dirty]] . The ".dev0" means not master branch. Exceptions: 1: no tags. 0.postDISTANCE[.dev0]+gHEX[.dirty] """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"] or pieces["dirty"]: rendered += ".post%d" % pieces["distance"] if pieces["branch"] != "master": rendered += ".dev0" rendered += plus_or_dot(pieces) rendered += "g%s" % pieces["short"] if pieces["dirty"]: rendered += ".dirty" else: # exception #1 rendered = "0.post%d" % pieces["distance"] if pieces["branch"] != "master": rendered += ".dev0" rendered += "+g%s" % pieces["short"] if pieces["dirty"]: rendered += ".dirty" return rendered def render_pep440_old(pieces: Dict[str, Any]) -> str: """TAG[.postDISTANCE[.dev0]] . The ".dev0" means dirty. Exceptions: 1: no tags. 0.postDISTANCE[.dev0] """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"] or pieces["dirty"]: rendered += ".post%d" % pieces["distance"] if pieces["dirty"]: rendered += ".dev0" else: # exception #1 rendered = "0.post%d" % pieces["distance"] if pieces["dirty"]: rendered += ".dev0" return rendered def render_git_describe(pieces: Dict[str, Any]) -> str: """TAG[-DISTANCE-gHEX][-dirty]. Like 'git describe --tags --dirty --always'. Exceptions: 1: no tags. HEX[-dirty] (note: no 'g' prefix) """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"]: rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) else: # exception #1 rendered = pieces["short"] if pieces["dirty"]: rendered += "-dirty" return rendered def render_git_describe_long(pieces: Dict[str, Any]) -> str: """TAG-DISTANCE-gHEX[-dirty]. Like 'git describe --tags --dirty --always -long'. The distance/hash is unconditional. Exceptions: 1: no tags. 
HEX[-dirty] (note: no 'g' prefix) """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) else: # exception #1 rendered = pieces["short"] if pieces["dirty"]: rendered += "-dirty" return rendered def render(pieces: Dict[str, Any], style: str) -> Dict[str, Any]: """Render the given version pieces into the requested style.""" if pieces["error"]: return {"version": "unknown", "full-revisionid": pieces.get("long"), "dirty": None, "error": pieces["error"], "date": None} if not style or style == "default": style = "pep440" # the default if style == "pep440": rendered = render_pep440(pieces) elif style == "pep440-branch": rendered = render_pep440_branch(pieces) elif style == "pep440-pre": rendered = render_pep440_pre(pieces) elif style == "pep440-post": rendered = render_pep440_post(pieces) elif style == "pep440-post-branch": rendered = render_pep440_post_branch(pieces) elif style == "pep440-old": rendered = render_pep440_old(pieces) elif style == "git-describe": rendered = render_git_describe(pieces) elif style == "git-describe-long": rendered = render_git_describe_long(pieces) else: raise ValueError("unknown style '%s'" % style) return {"version": rendered, "full-revisionid": pieces["long"], "dirty": pieces["dirty"], "error": None, "date": pieces.get("date")} def get_versions() -> Dict[str, Any]: """Get version information or return default if unable to do so.""" # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have # __file__, we can work backwards from there to the root. Some # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which # case we can only use expanded keywords. cfg = get_config() verbose = cfg.verbose try: return git_versions_from_keywords(get_keywords(), cfg.tag_prefix, verbose) except NotThisMethod: pass try: root = os.path.realpath(__file__) # versionfile_source is the relative path from the top of the source # tree (where the .git directory might live) to this file. Invert # this to find the root from __file__. for _ in cfg.versionfile_source.split('/'): root = os.path.dirname(root) except NameError: return {"version": "0+unknown", "full-revisionid": None, "dirty": None, "error": "unable to find root of source tree", "date": None} try: pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose) return render(pieces, cfg.style) except NotThisMethod: pass try: if cfg.parentdir_prefix: return versions_from_parentdir(cfg.parentdir_prefix, root, verbose) except NotThisMethod: pass return {"version": "0+unknown", "full-revisionid": None, "dirty": None, "error": "unable to compute version", "date": None} python-versioneer-0.29/details.md000066400000000000000000000275421445202303300171210ustar00rootroot00000000000000 ## What environments does Versioneer support? Versioneer may be called upon to calculate version information from one of four different environments. The first, "from-vcs", is used when run from a checked-out source tree, and asks the version-control tools for information (e.g. `git describe`). This provides the most data: it can examine the commit history, find recent tags, and detect uncommitted changes (a "dirty" tree). In general, source distributions (`setup.py sdist`) are created from this environment, and the calculated version string is embedded into the generated tarball (by replacing `_version.py` with a "short form" that contains literal strings instead of code to run git). When these tarballs are unpacked, they provide the second environment, "from-file". 
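For reference, the "short form" `_version.py` that `setup.py sdist` writes into such a tarball is just a JSON blob plus a trivial accessor. A sketch (with made-up version values) looks roughly like this:

```python
# _version.py as it appears inside an sdist (illustrative values)
import json

version_json = '''
{
 "date": "2023-07-02T12:00:00+0000",
 "dirty": false,
 "error": null,
 "full-revisionid": "0123456789abcdef0123456789abcdef01234567",
 "version": "1.2"
}
'''  # END VERSION_JSON


def get_versions():
    return json.loads(version_json)
```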
This environment is also created when you use `setup.py build` and then import code from the generated `build/` directory. All the version strings come from the previous "from-vcs" environment that created the tarball, frozen at that point. `_version.py` might record the fact that the sdist was created from a dirty tree, however it is not possible to detect additional changes. Sometimes you use the VCS tools to make a tarball directly, like `git archive`, without using `setup.py sdist`. Unpacking such a tarball results in the third environment, "from-keyword". The resulting source tree will contain expanded keywords in the `_version.py` file, which tells it a git revision, and an exact tag (if any), but cannot generally detect things like commits-since-recent-tag, or recent-tag at all. As with from-file, this does not give Versioneer enough information to detect additional changes, and the "dirty" status is always False. In general, you should only use `git archive` on tagged revisions. Creating archives from untagged revisions will result in a version string that's simply a copy of the full SHA1 hash. If all these methods fail, Versioneer attempts to pull a version string from the name of the parent directory, since tarballs are frequently built this way. This environment is called "from-parentdir". This can provide the main version string, but not a full-revision hash. "dirty" is always False. If even that fails, Versioneer will either report a version of "0+unknown", and will signal an error. ## What does get_version() return? It returns `get_versions()["version"]`. See below for what that means. `get_version()` and `get_versions()` are the main functions through which Versioneer provides version data to your program. Your `setup.py` can do `from versioneer import get_version`, and your top-level runtime modules can do `from ._version import get_version`. ## What does get_versions() return? `get_versions()` returns a small dictionary of rendered version information, which always contains four keys: | key | description | | --- | --- | | `version` | The version string as selected by `version-style` | | `full-revisionid` | A full-length hex SHA1 (for git), or equivalent (for other VCS systems), or None. | | `dirty` | A boolean, True if the source tree has local changes. None if unknown. | | `error` | None, or a error description string | `version` will always be a string (`str` on py3, `unicode` on py2): if Versioneer is unable to compute a version, it will be set to `"0+unknown"`. `full-revisionid` will be a str/unicode, or None if that information is not available. `dirty` will be a boolean, or None if unavailable. `error` will be None, or a str/unicode if there was an error. If the `error` key is non-None, that indicates that Versioneer was unable to obtain a satisfactory version string. There are several possibilities: * the closest tag found did not start with the configured `tag_prefix` * from-keyword mode did not find any tags (e.g. an archive created from a bare SHA1, instead of from a tag) * the output of `git describe` was unparsable * all modes failed: the source tree has no `.git` directory, expanded keywords, pre-built version data ("from-file"), or useful parent directory name. When `error` occurs, `version` will be set to "0+unknown", `full-revisionid` will be set (in from-vcs mode) or None (in other modes), and `dirty` will be None. 
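For illustration, a successful from-vcs lookup with the default `pep440` style returns a dictionary along these lines (the tag, distance, and hash are made-up values, and `myproject` is a placeholder package name):

```python
from myproject._version import get_versions  # hypothetical package layout

info = get_versions()
# e.g. {"version": "1.2+3.g0123456.dirty",
#       "full-revisionid": "0123456789abcdef0123456789abcdef01234567",
#       "dirty": True,
#       "error": None}
print(info["version"])            # "1.2+3.g0123456.dirty"
print(info["full-revisionid"])    # full 40-character hex SHA1
```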
If you want to prevent builds from happening without solid version information, use a snippet like this in your `__init__.py` or `setup.py`: ```python v = get_versions() if v["error"]: raise RuntimeError(v["error"]) ``` `get_versions()["version"]` is the most useful value, intended for `setup.py` and runtime version introspection to support a CLI command's `--version` argument. This is available in all modes, but has the most fidelity in from-vcs environments. `get_versions()["full-revisionid"]` is probably useful for an extended form of CLI `--version`, and for including in machine-generated error/crash reports. In from-parentdir mode, its value will be `None`, but in the other modes (from-vcs, from-file, from-keyword) it will be the git SHA1 hash (a 40-character lowercase hexadecimal string), or the equivalent for other VCS systems. `get_versions()["dirty"]` might be useful when running tests, to remind someone viewing a transcript that there are uncommitted changes which might affect the results. In most cases, this information will also be present in `["version"]` (it will contain a `-dirty` suffix). It may also be useful for `setup.py` decision making: ```python if versioneer.get_versions()["dirty"]: raise MustCommitError("please commit everything before making tarballs") ``` `dirty` is most meaningful in from-vcs mode. In from-file mode, it records the dirty status of the tree from which the setup.py build/sdist command was run, and is not affected by subsequent changes to the generated tree. In from-keyword and from-parentdir mode, it will always be `False`. ## How do I select a version `style`? In from-vcs mode (inside a git checkout), Versioneer can get a lot of data about the state of the tree: the current tag (if any), the closest historical tag, the number of commits since that tag, the exact revision ID, and the 'dirty' state. These pieces are used by a renderer function to compute the `['version']` in the small dictionary that will be returned by `get_versions()`. The renderer function is controlled by a configuration value called `style`. You can use this to select the kind of version string you want to use. The available forms are: | key | description | | --- | ----------- | | `default` | same as `pep440` | | `pep440` | `TAG[+DISTANCE.gSHORTHASH[.dirty]]`, a PEP-440 compatible version string which uses the "local version identifier" to record the complete non-tag information. This format provides compliant versions even under unusual/error circumstances. It returns `0+untagged.DISTANCE.gHASH[.dirty]` before any tags have been set, `0+unknown` if the tree does not contain enough information to report a version (e.g. the .git directory has been removed), and `0.unparsable[.dirty]` if `git describe` emits something weird. If TAG includes a plus sign, then this will use a dot as a separator instead (`TAG[.DISTANCE.gSHORTHASH[.dirty]]`).| | `pep440-branch`| `TAG[[.dev0]+DISTANCE.gSHORTHASH[.dirty]]`, a PEP-440 compatible version string, identical to the `pep440` style with the addition of a `.dev0` component if the tree is on a branch other than `master`. Note that PEP-0440 rules indicate that X.dev0 sorts as "older" than X, so feature branches will always appear "older" than the `master` branch in this format, even with `pip install --pre`. | | `pep440-pre` | `TAG[.post0.devDISTANCE]`, a PEP-440 compatible version string which loses information but has the useful property that non-tagged versions qualify for `pip install --pre` (by virtue of the `.dev` component). 
This form does not record the commit hash, nor the `-dirty` flag. | | `pep440-post` | `TAG[.postDISTANCE[.dev0]+gSHORTHASH]`, a PEP-440 compatible version string which allows all commits to get installable versions, and retains the commit hash. | `pep440-post-branch` | `TAG[.postDISTANCE[.dev0]+gHEX[.dirty]]`, a PEP-440 compatible version string, similar to the `pep440-post` style except the `.dev0` component is used to determine if the project tree is on a feature branch. It is appended if the tree is on a non `master` branch so that packages generated are not installed by pip unless `--pre` is specified. | | `pep440-old` | `TAG[.postDISTANCE[.dev0]]`, a PEP-440 compatible version string which loses information but enables downstream projects to depend upon post-release versions (by counting commits). The ".dev0" suffix indicates a dirty tree. This form does not record the commit hash. If nothing has been tagged, this will be `0.postDISTANCE[.dev0]`. Note that PEP-0440 rules indicate that `X.dev0` sorts as "older" than `X`, so our -dirty flag is expressed somewhat backwards (usually "dirty" indicates newer changes than the base commit), but PEP-0440 offers no positive post-".postN" component. You should never be releasing software with -dirty anyways. | | `git-describe` | `TAG[-DISTANCE-gSHORTHASH][-dirty]`, equivalent to `git describe --tags --dirty --always`. The distance and shorthash are only included if the commit is not tagged. If nothing was tagged, this will be the short revisionid, plus "-dirty" if dirty. | | `git-describe-long` | `TAG-DISTANCE-gSHORTHASH[-dirty]`, equivalent to `git describe --tags --dirty --always --long`. The distance and shorthash are included unconditionally. As with `git-describe`, if nothing was tagged, this will be the short revisionid, possibly with "-dirty". | ## Pieces used by from-vcs Internally, the from-vcs function is expected to return the following values. The renderer uses these to compute the version string. | key | description | | --- | ----------- | | `long` | a full-length id (hex SHA1 for git) for the current revision | | `short` | a truncated form of `full-revisionid`, typically 7 characters for git (but might be more in large repositories if necessary to uniquely identify the commit) | | `error` | a string, if something was unparsable | | `closest-tag` | a string (or None if nothing has been tagged), with the name of the closest ancestor tag. The "tag prefix" is stripped off. | | `distance` | an integer, the number of commits since the most recent tag. If the current revision is tagged, this will be 0. If nothing has been tagged, this will be the total number of commits. | | `dirty` | a boolean, indicating that the working directory has modified files | If a value is not available (e.g. the source tree does not contain enough information to provide it), the dictionary will not contain that key. The from-keywords mode will only produce `exact-tag` and `full-revisionid`. If the git-archive tarball was created from a non-tagged revision, `exact-tag` will be None. These tarballs use keyword expansion, and there is no git-attributes keyword that replicates the tag-searching features of git-describe. `dirty` modification to the source tree can only be detected from a git checkout. A build or sdist created from a dirty tree will be marked as dirty, however an sdist created from a clean tree which is subsequently modified will not be reported as dirty. ## What version strings will we get in each environment? 
(note: this is not yet accurate) | key | file | keywords | git-describe | parentdir | | --- | ------------------- | ----------------- | ------------------------ | --------- | | pep440 | TAG[+DIST.gHASH] | TAG or 0.unknown? | TAG[+DIST.gHASH[.dirty]] | TAG or ? | | pep440-pre | TAG[.post0.devDIST] | TAG or ? | TAG[.post0.devDIST] | TAG or ? | | pep440-old | TAG[.postDIST] | TAG or ? | TAG[.postDIST[.dev0]] | TAG or ? | | git-describe | TAG[-DIST-gHASH] | TAG or ? | TAG[-DIST-gHASH][-dirty] | TAG or ? | | long | TAG-DIST-gHASH | TAG-gHASH or ? | TAG-DIST-gHASH[-dirty] | ? | python-versioneer-0.29/developers.md000066400000000000000000000016411445202303300176340ustar00rootroot00000000000000 ## To add support for a new VCS So, you want to extend Versioneer to support your favorite version control system? Great! Here's what to do: * 1: `mkdir src/NEW-VCS/` * 2: create work-alikes for everything in `src/git/` * 3: add NEW-VCS to the loop in setup.py `generate_versioneer()` * 4: add clauses for NEW-VCS to `src/get_versions.py`, for both the from-keywords and from-vcs sections * 5: add `test/test_NEWVCS.py`, copying the general style of `test_git.py` but using NEWVCS instead of git. * 6: add a line to .travis.yml to execute your `test_NEWVCS.py` upon checkins Then file a pull request! ## To make a release * test, etc * edit setup.py to set VERSION=, commit -m "release X.X" * push origin master X.X * python setup.py bdist_wheel --universal sdist register upload (if setup.py doesn't acknowledge `bdist_wheel`, and the "wheel" package is installed, look for stale versions of setuptools and delete them) python-versioneer-0.29/pyproject.toml000066400000000000000000000022701445202303300200550ustar00rootroot00000000000000[build-system] requires = ["setuptools", "tomli; python_version < '3.11'"] build-backend = "setuptools.build_meta" [project] name = "versioneer" description = "Easy VCS-based management of project version strings" authors = [ { name = "Brian Warner" }, ] maintainers = [ { name = "Justin Wood", email = "Callek+versioneer@gmail.com" }, { name = "Nathan Buckner", email = "bucknerns@users.noreply.github.com" }, ] readme = "README.md" license = { file="LICENSE" } requires-python = ">=3.7" classifiers = [ "Programming Language :: Python", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "License :: OSI Approved :: The Unlicense (Unlicense)", ] dynamic = ["version"] [project.urls] "Homepage" = "https://github.com/python-versioneer/python-versioneer" [project.scripts] "versioneer" = "versioneer:main" [project.optional-dependencies] toml = ["tomli; python_version < '3.11'"] [tool.versioneer] VCS = "git" style = "pep440" versionfile_source = "_version.py" parentdir_prefix = "" python-versioneer-0.29/setup.py000077500000000000000000000126651445202303300166670ustar00rootroot00000000000000#!/usr/bin/env python import os, base64, tempfile, io from importlib import util as ilu from pathlib import Path from typing import List, Tuple from setuptools import setup, Command from setuptools.command.build_py import build_py from setuptools.command.develop import develop as _develop # If versioneer is not installed in the environment, then we will need to # need to build and exec it. The build requires a VERSION, so we might need # this before we know its value. 
VERSION = "0+bootstrap" def ver(s: str) -> str: return s.replace("@VERSIONEER-VERSION@", VERSION) def get( fn: str, add_ver: bool = False, unquote: bool = False, do_strip: bool = False, do_readme: bool = False ) -> str: with open(fn) as f: text = f.read() if add_ver: text = ver(text) if unquote: text = text.replace("%", "%%") if do_strip: text = "".join(line for line in text.splitlines(keepends=True) if not line.endswith("# --STRIP DURING BUILD\n")) if do_readme: text = text.replace("@README@", get("README.md")) return text def get_vcs_list() -> List[str]: project_path = Path(__file__).absolute().parent / "src" return [filename for filename in os.listdir(str(project_path)) if Path.is_dir(project_path / filename) and filename != "__pycache__"] def generate_long_version_py(VCS: str) -> str: s = io.StringIO() s.write(get(f"src/{VCS}/long_header.py", add_ver=True, do_strip=True)) for piece in ["src/subprocess_helper.py", "src/from_parentdir.py", f"src/{VCS}/from_keywords.py", f"src/{VCS}/from_vcs.py", "src/render.py", f"src/{VCS}/long_get_versions.py"]: s.write(get(piece, unquote=True, do_strip=True)) return s.getvalue() def generate_versioneer_py() -> bytes: s = io.StringIO() s.write(get("src/header.py", add_ver=True, do_readme=True, do_strip=True)) s.write(get("src/subprocess_helper.py", do_strip=True)) for VCS in get_vcs_list(): s.write(f"LONG_VERSION_PY['{VCS}'] = r'''\n") s.write(generate_long_version_py(VCS)) s.write("'''\n") s.write("\n\n") s.write(get(f"src/{VCS}/from_keywords.py", do_strip=True)) s.write(get(f"src/{VCS}/from_vcs.py", do_strip=True)) s.write(get(f"src/{VCS}/install.py", do_strip=True)) s.write(get("src/from_parentdir.py", do_strip=True)) s.write(get("src/from_file.py", add_ver=True, do_strip=True)) s.write(get("src/render.py", do_strip=True)) s.write(get("src/get_versions.py", do_strip=True)) s.write(get("src/cmdclass.py", do_strip=True)) s.write(get("src/setupfunc.py", do_strip=True)) return s.getvalue().encode("utf-8") class make_versioneer(Command): description = "create standalone versioneer.py" user_options: List[Tuple[str, str, str]] = [] boolean_options: List[str] = [] def initialize_options(self) -> None: pass def finalize_options(self) -> None: pass def run(self) -> None: with open("versioneer.py", "w") as f: f.write(generate_versioneer_py().decode("utf8")) class make_long_version_py_git(Command): description = "create standalone _version.py (for git)" user_options: List[Tuple[str, str, str]] = [] boolean_options: List[str] = [] def initialize_options(self) -> None: pass def finalize_options(self) -> None: pass def run(self) -> None: assert os.path.exists("versioneer.py") long_version = generate_long_version_py("git") with open("git_version.py", "w") as f: f.write(long_version % {"DOLLAR": "$", "STYLE": "pep440", "TAG_PREFIX": "tag-", "PARENTDIR_PREFIX": "parentdir_prefix", "VERSIONFILE_SOURCE": "versionfile_source", }) class my_build_py(build_py): def run(self) -> None: v = generate_versioneer_py() v_b64 = base64.b64encode(v).decode("ascii") lines = [v_b64[i:i+60] for i in range(0, len(v_b64), 60)] v_b64 = "\n".join(lines)+"\n" s = Path("src/installer.py").read_text() s = ver(s.replace("@VERSIONEER-INSTALLER@", v_b64)) with tempfile.TemporaryDirectory() as tempdir: installer = Path(tempdir) / "versioneer.py" installer.write_text(s) self.package_dir.update({'': os.path.relpath(installer.parent)}) build_py.run(self) # The structure of versioneer, with its components that are compiled into a single file, # makes it unsuitable for development mode. 
class develop(_develop): def run(self) -> None: # type: ignore[override] raise RuntimeError("Versioneer cannot be installed in developer/editable mode.") # Bootstrap a versioneer module to guarantee that we get a compatible version versioneer = ilu.module_from_spec( ilu.spec_from_loader('versioneer', loader=None) # type: ignore[arg-type] ) exec(generate_versioneer_py(), versioneer.__dict__) VERSION = versioneer.get_version() setup( name="versioneer", # need by GitHub dependency graph version=VERSION, py_modules=["versioneer"], cmdclass=versioneer.get_cmdclass({ "build_py": my_build_py, "make_versioneer": make_versioneer, "make_long_version_py_git": make_long_version_py_git, "develop": develop, }) ) python-versioneer-0.29/src/000077500000000000000000000000001445202303300157275ustar00rootroot00000000000000python-versioneer-0.29/src/__init__.py000066400000000000000000000000001445202303300200260ustar00rootroot00000000000000python-versioneer-0.29/src/cmdclass.py000066400000000000000000000260171445202303300201000ustar00rootroot00000000000000import os, sys # --STRIP DURING BUILD from typing import Any, Dict, List, Optional, Tuple # --STRIP DURING BUILD from .header import LONG_VERSION_PY, get_root, get_config_from_root # --STRIP DURING BUILD from .get_versions import get_versions # --STRIP DURING BUILD from .from_file import write_to_version_file # --STRIP DURING BUILD def get_cmdclass(cmdclass: Optional[Dict[str, Any]] = None): """Get the custom setuptools subclasses used by Versioneer. If the package uses a different cmdclass (e.g. one from numpy), it should be provide as an argument. """ if "versioneer" in sys.modules: del sys.modules["versioneer"] # this fixes the "python setup.py develop" case (also 'install' and # 'easy_install .'), in which subdependencies of the main project are # built (using setup.py bdist_egg) in the same python process. Assume # a main project A and a dependency B, which use different versions # of Versioneer. A's setup.py imports A's Versioneer, leaving it in # sys.modules by the time B's setup.py is executed, causing B to run # with the wrong versioneer. Setuptools wraps the sub-dep builds in a # sandbox that restores sys.modules to it's pre-build state, so the # parent is protected against the child's "import versioneer". By # removing ourselves from sys.modules here, before the child build # happens, we protect the child from the parent's versioneer too. # Also see https://github.com/python-versioneer/python-versioneer/issues/52 cmds = {} if cmdclass is None else cmdclass.copy() # we add "version" to setuptools from setuptools import Command class cmd_version(Command): description = "report generated version string" user_options: List[Tuple[str, str, str]] = [] boolean_options: List[str] = [] def initialize_options(self) -> None: pass def finalize_options(self) -> None: pass def run(self) -> None: vers = get_versions(verbose=True) print("Version: %s" % vers["version"]) print(" full-revisionid: %s" % vers.get("full-revisionid")) print(" dirty: %s" % vers.get("dirty")) print(" date: %s" % vers.get("date")) if vers["error"]: print(" error: %s" % vers["error"]) cmds["version"] = cmd_version # we override "build_py" in setuptools # # most invocation pathways end up running build_py: # distutils/build -> build_py # distutils/install -> distutils/build ->.. # setuptools/bdist_wheel -> distutils/install ->.. # setuptools/bdist_egg -> distutils/install_lib -> build_py # setuptools/install -> bdist_egg ->.. # setuptools/develop -> ? 
# pip install: # copies source tree to a tempdir before running egg_info/etc # if .git isn't copied too, 'git describe' will fail # then does setup.py bdist_wheel, or sometimes setup.py install # setup.py egg_info -> ? # pip install -e . and setuptool/editable_wheel will invoke build_py # but the build_py command is not expected to copy any files. # we override different "build_py" commands for both environments if 'build_py' in cmds: _build_py: Any = cmds['build_py'] else: from setuptools.command.build_py import build_py as _build_py class cmd_build_py(_build_py): def run(self) -> None: root = get_root() cfg = get_config_from_root(root) versions = get_versions() _build_py.run(self) if getattr(self, "editable_mode", False): # During editable installs `.py` and data files are # not copied to build_lib return # now locate _version.py in the new build/ directory and replace # it with an updated value if cfg.versionfile_build: target_versionfile = os.path.join(self.build_lib, cfg.versionfile_build) print("UPDATING %s" % target_versionfile) write_to_version_file(target_versionfile, versions) cmds["build_py"] = cmd_build_py if 'build_ext' in cmds: _build_ext: Any = cmds['build_ext'] else: from setuptools.command.build_ext import build_ext as _build_ext class cmd_build_ext(_build_ext): def run(self) -> None: root = get_root() cfg = get_config_from_root(root) versions = get_versions() _build_ext.run(self) if self.inplace: # build_ext --inplace will only build extensions in # build/lib<..> dir with no _version.py to write to. # As in place builds will already have a _version.py # in the module dir, we do not need to write one. return # now locate _version.py in the new build/ directory and replace # it with an updated value if not cfg.versionfile_build: return target_versionfile = os.path.join(self.build_lib, cfg.versionfile_build) if not os.path.exists(target_versionfile): print(f"Warning: {target_versionfile} does not exist, skipping " "version update. This can happen if you are running build_ext " "without first running build_py.") return print("UPDATING %s" % target_versionfile) write_to_version_file(target_versionfile, versions) cmds["build_ext"] = cmd_build_ext if "cx_Freeze" in sys.modules: # cx_freeze enabled? from cx_Freeze.dist import build_exe as _build_exe # type: ignore # nczeczulin reports that py2exe won't like the pep440-style string # as FILEVERSION, but it can be used for PRODUCTVERSION, e.g. # setup(console=[{ # "version": versioneer.get_version().split("+", 1)[0], # FILEVERSION # "product_version": versioneer.get_version(), # ... class cmd_build_exe(_build_exe): def run(self) -> None: root = get_root() cfg = get_config_from_root(root) versions = get_versions() target_versionfile = cfg.versionfile_source print("UPDATING %s" % target_versionfile) write_to_version_file(target_versionfile, versions) _build_exe.run(self) os.unlink(target_versionfile) with open(cfg.versionfile_source, "w") as f: LONG = LONG_VERSION_PY[cfg.VCS] f.write(LONG % {"DOLLAR": "$", "STYLE": cfg.style, "TAG_PREFIX": cfg.tag_prefix, "PARENTDIR_PREFIX": cfg.parentdir_prefix, "VERSIONFILE_SOURCE": cfg.versionfile_source, }) cmds["build_exe"] = cmd_build_exe del cmds["build_py"] if 'py2exe' in sys.modules: # py2exe enabled? 
try: from py2exe.setuptools_buildexe import py2exe as _py2exe # type: ignore except ImportError: from py2exe.distutils_buildexe import py2exe as _py2exe # type: ignore class cmd_py2exe(_py2exe): def run(self) -> None: root = get_root() cfg = get_config_from_root(root) versions = get_versions() target_versionfile = cfg.versionfile_source print("UPDATING %s" % target_versionfile) write_to_version_file(target_versionfile, versions) _py2exe.run(self) os.unlink(target_versionfile) with open(cfg.versionfile_source, "w") as f: LONG = LONG_VERSION_PY[cfg.VCS] f.write(LONG % {"DOLLAR": "$", "STYLE": cfg.style, "TAG_PREFIX": cfg.tag_prefix, "PARENTDIR_PREFIX": cfg.parentdir_prefix, "VERSIONFILE_SOURCE": cfg.versionfile_source, }) cmds["py2exe"] = cmd_py2exe # sdist farms its file list building out to egg_info if 'egg_info' in cmds: _egg_info: Any = cmds['egg_info'] else: from setuptools.command.egg_info import egg_info as _egg_info class cmd_egg_info(_egg_info): def find_sources(self) -> None: # egg_info.find_sources builds the manifest list and writes it # in one shot super().find_sources() # Modify the filelist and normalize it root = get_root() cfg = get_config_from_root(root) self.filelist.append('versioneer.py') if cfg.versionfile_source: # There are rare cases where versionfile_source might not be # included by default, so we must be explicit self.filelist.append(cfg.versionfile_source) self.filelist.sort() self.filelist.remove_duplicates() # The write method is hidden in the manifest_maker instance that # generated the filelist and was thrown away # We will instead replicate their final normalization (to unicode, # and POSIX-style paths) from setuptools import unicode_utils normalized = [unicode_utils.filesys_decode(f).replace(os.sep, '/') for f in self.filelist.files] manifest_filename = os.path.join(self.egg_info, 'SOURCES.txt') with open(manifest_filename, 'w') as fobj: fobj.write('\n'.join(normalized)) cmds['egg_info'] = cmd_egg_info # we override different "sdist" commands for both environments if 'sdist' in cmds: _sdist: Any = cmds['sdist'] else: from setuptools.command.sdist import sdist as _sdist class cmd_sdist(_sdist): def run(self) -> None: versions = get_versions() self._versioneer_generated_versions = versions # unless we update this, the command will keep using the old # version self.distribution.metadata.version = versions["version"] return _sdist.run(self) def make_release_tree(self, base_dir: str, files: List[str]) -> None: root = get_root() cfg = get_config_from_root(root) _sdist.make_release_tree(self, base_dir, files) # now locate _version.py in the new base_dir directory # (remembering that it may be a hardlink) and replace it with an # updated value target_versionfile = os.path.join(base_dir, cfg.versionfile_source) print("UPDATING %s" % target_versionfile) write_to_version_file(target_versionfile, self._versioneer_generated_versions) cmds["sdist"] = cmd_sdist return cmds python-versioneer-0.29/src/from_file.py000066400000000000000000000031121445202303300202400ustar00rootroot00000000000000SHORT_VERSION_PY = """ # This file was generated by 'versioneer.py' (@VERSIONEER-VERSION@) from # revision-control system data, or from the parent directory name of an # unpacked source archive. Distribution tarballs contain a pre-generated copy # of this file. 
import json version_json = ''' %s ''' # END VERSION_JSON def get_versions(): return json.loads(version_json) """ import json # --STRIP DURING BUILD import re # --STRIP DURING BUILD from typing import Any, Dict # --STRIP DURING BUILD from .header import NotThisMethod # --STRIP DURING BUILD def versions_from_file(filename: str) -> Dict[str, Any]: """Try to determine the version from _version.py if present.""" try: with open(filename) as f: contents = f.read() except OSError: raise NotThisMethod("unable to read _version.py") mo = re.search(r"version_json = '''\n(.*)''' # END VERSION_JSON", contents, re.M | re.S) if not mo: mo = re.search(r"version_json = '''\r\n(.*)''' # END VERSION_JSON", contents, re.M | re.S) if not mo: raise NotThisMethod("no version_json in _version.py") return json.loads(mo.group(1)) def write_to_version_file(filename: str, versions: Dict[str, Any]) -> None: """Write the given version number to the given _version.py file.""" contents = json.dumps(versions, sort_keys=True, indent=1, separators=(",", ": ")) with open(filename, "w") as f: f.write(SHORT_VERSION_PY % contents) print("set %s to '%s'" % (filename, versions["version"])) python-versioneer-0.29/src/from_parentdir.py000066400000000000000000000021771445202303300213230ustar00rootroot00000000000000import os # --STRIP DURING BUILD from .header import NotThisMethod # --STRIP DURING BUILD from typing import Any, Dict # --STRIP DURING BUILD def versions_from_parentdir( parentdir_prefix: str, root: str, verbose: bool, ) -> Dict[str, Any]: """Try to determine the version from the parent directory name. Source tarballs conventionally unpack into a directory that includes both the project name and a version string. We will also support searching up two directory levels for an appropriately named parent directory """ rootdirs = [] for _ in range(3): dirname = os.path.basename(root) if dirname.startswith(parentdir_prefix): return {"version": dirname[len(parentdir_prefix):], "full-revisionid": None, "dirty": False, "error": None, "date": None} rootdirs.append(root) root = os.path.dirname(root) # up a level if verbose: print("Tried directories %s but none started with prefix %s" % (str(rootdirs), parentdir_prefix)) raise NotThisMethod("rootdir doesn't start with parentdir_prefix") python-versioneer-0.29/src/get_versions.py000066400000000000000000000064411445202303300210150ustar00rootroot00000000000000import os, sys # --STRIP DURING BUILD from typing import Any, Dict # --STRIP DURING BUILD from .header import HANDLERS, get_root, get_config_from_root # --STRIP DURING BUILD from .header import NotThisMethod # --STRIP DURING BUILD from .from_file import versions_from_file # --STRIP DURING BUILD from .from_parentdir import versions_from_parentdir # --STRIP DURING BUILD from .render import render # --STRIP DURING BUILD class VersioneerBadRootError(Exception): """The project root directory is unknown or missing key files.""" def get_versions(verbose: bool = False) -> Dict[str, Any]: """Get the project version from whatever source is available. Returns dict with two keys: 'version' and 'full'. 
""" if "versioneer" in sys.modules: # see the discussion in cmdclass.py:get_cmdclass() del sys.modules["versioneer"] root = get_root() cfg = get_config_from_root(root) assert cfg.VCS is not None, "please set [versioneer]VCS= in setup.cfg" handlers = HANDLERS.get(cfg.VCS) assert handlers, "unrecognized VCS '%s'" % cfg.VCS verbose = verbose or bool(cfg.verbose) # `bool()` used to avoid `None` assert cfg.versionfile_source is not None, \ "please set versioneer.versionfile_source" assert cfg.tag_prefix is not None, "please set versioneer.tag_prefix" versionfile_abs = os.path.join(root, cfg.versionfile_source) # extract version from first of: _version.py, VCS command (e.g. 'git # describe'), parentdir. This is meant to work for developers using a # source checkout, for users of a tarball created by 'setup.py sdist', # and for users of a tarball/zipball created by 'git archive' or github's # download-from-tag feature or the equivalent in other VCSes. get_keywords_f = handlers.get("get_keywords") from_keywords_f = handlers.get("keywords") if get_keywords_f and from_keywords_f: try: keywords = get_keywords_f(versionfile_abs) ver = from_keywords_f(keywords, cfg.tag_prefix, verbose) if verbose: print("got version from expanded keyword %s" % ver) return ver except NotThisMethod: pass try: ver = versions_from_file(versionfile_abs) if verbose: print("got version from file %s %s" % (versionfile_abs, ver)) return ver except NotThisMethod: pass from_vcs_f = handlers.get("pieces_from_vcs") if from_vcs_f: try: pieces = from_vcs_f(cfg.tag_prefix, root, verbose) ver = render(pieces, cfg.style) if verbose: print("got version from VCS %s" % ver) return ver except NotThisMethod: pass try: if cfg.parentdir_prefix: ver = versions_from_parentdir(cfg.parentdir_prefix, root, verbose) if verbose: print("got version from parentdir %s" % ver) return ver except NotThisMethod: pass if verbose: print("unable to compute version") return {"version": "0+unknown", "full-revisionid": None, "dirty": None, "error": "unable to compute version", "date": None} def get_version() -> str: """Get the short version string for this project.""" return get_versions()["version"] python-versioneer-0.29/src/git/000077500000000000000000000000001445202303300165125ustar00rootroot00000000000000python-versioneer-0.29/src/git/__init__.py000066400000000000000000000000001445202303300206110ustar00rootroot00000000000000python-versioneer-0.29/src/git/from_keywords.py000066400000000000000000000107211445202303300217570ustar00rootroot00000000000000import re # --STRIP DURING BUILD from typing import Any, Dict # --STRIP DURING BUILD from .long_header import NotThisMethod, register_vcs_handler # --STRIP DURING BUILD @register_vcs_handler("git", "get_keywords") def git_get_keywords(versionfile_abs: str) -> Dict[str, str]: """Extract version information from the given file.""" # the code embedded in _version.py can just fetch the value of these # keywords. When used from setup.py, we don't want to import _version.py, # so we do it with a regexp instead. This function is not used from # _version.py. 
keywords: Dict[str, str] = {} try: with open(versionfile_abs, "r") as fobj: for line in fobj: if line.strip().startswith("git_refnames ="): mo = re.search(r'=\s*"(.*)"', line) if mo: keywords["refnames"] = mo.group(1) if line.strip().startswith("git_full ="): mo = re.search(r'=\s*"(.*)"', line) if mo: keywords["full"] = mo.group(1) if line.strip().startswith("git_date ="): mo = re.search(r'=\s*"(.*)"', line) if mo: keywords["date"] = mo.group(1) except OSError: pass return keywords @register_vcs_handler("git", "keywords") def git_versions_from_keywords( keywords: Dict[str, str], tag_prefix: str, verbose: bool, ) -> Dict[str, Any]: """Get version information from git keywords.""" if "refnames" not in keywords: raise NotThisMethod("Short version file found") date = keywords.get("date") if date is not None: # Use only the last line. Previous lines may contain GPG signature # information. date = date.splitlines()[-1] # git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant # datestamp. However we prefer "%ci" (which expands to an "ISO-8601 # -like" string, which we must then edit to make compliant), because # it's been around since git-1.5.3, and it's too difficult to # discover which version we're using, or to work around using an # older one. date = date.strip().replace(" ", "T", 1).replace(" ", "", 1) refnames = keywords["refnames"].strip() if refnames.startswith("$Format"): if verbose: print("keywords are unexpanded, not using") raise NotThisMethod("unexpanded keywords, not a git-archive tarball") refs = {r.strip() for r in refnames.strip("()").split(",")} # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of # just "foo-1.0". If we see a "tag: " prefix, prefer those. TAG = "tag: " tags = {r[len(TAG):] for r in refs if r.startswith(TAG)} if not tags: # Either we're using git < 1.8.3, or there really are no tags. We use # a heuristic: assume all version tags have a digit. The old git %d # expansion behaves like git log --decorate=short and strips out the # refs/heads/ and refs/tags/ prefixes that would let us distinguish # between branches and tags. By ignoring refnames without digits, we # filter out many common branch names like "release" and # "stabilization", as well as "HEAD" and "master". tags = {r for r in refs if re.search(r'\d', r)} if verbose: print("discarding '%s', no digits" % ",".join(refs - tags)) if verbose: print("likely tags: %s" % ",".join(sorted(tags))) for ref in sorted(tags): # sorting will prefer e.g. 
"2.0" over "2.0rc1" if ref.startswith(tag_prefix): r = ref[len(tag_prefix):] # Filter out refs that exactly match prefix or that don't start # with a number once the prefix is stripped (mostly a concern # when prefix is '') if not re.match(r'\d', r): continue if verbose: print("picking %s" % r) return {"version": r, "full-revisionid": keywords["full"].strip(), "dirty": False, "error": None, "date": date} # no suitable tags, so version is "0+unknown", but full hex is still there if verbose: print("no suitable tags, using unknown + full revision id") return {"version": "0+unknown", "full-revisionid": keywords["full"].strip(), "dirty": False, "error": "no suitable tags", "date": None} python-versioneer-0.29/src/git/from_vcs.py000066400000000000000000000127121445202303300207050ustar00rootroot00000000000000import sys # --STRIP DURING BUILD import re # --STRIP DURING BUILD import os # --STRIP DURING BUILD import functools # --STRIP DURING BUILD from typing import Any, Callable, Dict # --STRIP DURING BUILD from .long_header import NotThisMethod, register_vcs_handler # --STRIP DURING BUILD from subprocess_helper import run_command # --STRIP DURING BUILD # --STRIP DURING BUILD # --STRIP DURING BUILD @register_vcs_handler("git", "pieces_from_vcs") def git_pieces_from_vcs( tag_prefix: str, root: str, verbose: bool, runner: Callable = run_command ) -> Dict[str, Any]: """Get version from 'git describe' in the root of the source tree. This only gets called if the git-archive 'subst' keywords were *not* expanded, and _version.py hasn't already been rewritten with a short version string, meaning we're inside a checked out source tree. """ GITS = ["git"] if sys.platform == "win32": GITS = ["git.cmd", "git.exe"] # GIT_DIR can interfere with correct operation of Versioneer. # It may be intended to be passed to the Versioneer-versioned project, # but that should not change where we get our version from. env = os.environ.copy() env.pop("GIT_DIR", None) runner = functools.partial(runner, env=env) _, rc = runner(GITS, ["rev-parse", "--git-dir"], cwd=root, hide_stderr=not verbose) if rc != 0: if verbose: print("Directory %s not under git control" % root) raise NotThisMethod("'git rev-parse --git-dir' returned error") # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty] # if there isn't one, this yields HEX[-dirty] (no NUM) describe_out, rc = runner(GITS, [ "describe", "--tags", "--dirty", "--always", "--long", "--match", f"{tag_prefix}[[:digit:]]*" ], cwd=root) # --long was added in git-1.5.5 if describe_out is None: raise NotThisMethod("'git describe' failed") describe_out = describe_out.strip() full_out, rc = runner(GITS, ["rev-parse", "HEAD"], cwd=root) if full_out is None: raise NotThisMethod("'git rev-parse' failed") full_out = full_out.strip() pieces: Dict[str, Any] = {} pieces["long"] = full_out pieces["short"] = full_out[:7] # maybe improved later pieces["error"] = None branch_name, rc = runner(GITS, ["rev-parse", "--abbrev-ref", "HEAD"], cwd=root) # --abbrev-ref was added in git-1.6.3 if rc != 0 or branch_name is None: raise NotThisMethod("'git rev-parse --abbrev-ref' returned error") branch_name = branch_name.strip() if branch_name == "HEAD": # If we aren't exactly on a branch, pick a branch which represents # the current commit. If all else fails, we are on a branchless # commit. 
branches, rc = runner(GITS, ["branch", "--contains"], cwd=root) # --contains was added in git-1.5.4 if rc != 0 or branches is None: raise NotThisMethod("'git branch --contains' returned error") branches = branches.split("\n") # Remove the first line if we're running detached if "(" in branches[0]: branches.pop(0) # Strip off the leading "* " from the list of branches. branches = [branch[2:] for branch in branches] if "master" in branches: branch_name = "master" elif not branches: branch_name = None else: # Pick the first branch that is returned. Good or bad. branch_name = branches[0] pieces["branch"] = branch_name # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] # TAG might have hyphens. git_describe = describe_out # look for -dirty suffix dirty = git_describe.endswith("-dirty") pieces["dirty"] = dirty if dirty: git_describe = git_describe[:git_describe.rindex("-dirty")] # now we have TAG-NUM-gHEX or HEX if "-" in git_describe: # TAG-NUM-gHEX mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe) if not mo: # unparsable. Maybe git-describe is misbehaving? pieces["error"] = ("unable to parse git-describe output: '%s'" % describe_out) return pieces # tag full_tag = mo.group(1) if not full_tag.startswith(tag_prefix): if verbose: fmt = "tag '%s' doesn't start with prefix '%s'" print(fmt % (full_tag, tag_prefix)) pieces["error"] = ("tag '%s' doesn't start with prefix '%s'" % (full_tag, tag_prefix)) return pieces pieces["closest-tag"] = full_tag[len(tag_prefix):] # distance: number of commits since tag pieces["distance"] = int(mo.group(2)) # commit: short hex revision ID pieces["short"] = mo.group(3) else: # HEX: no tags pieces["closest-tag"] = None out, rc = runner(GITS, ["rev-list", "HEAD", "--left-right"], cwd=root) pieces["distance"] = len(out.split()) # total number of commits # commit date: see ISO-8601 comment in git_versions_from_keywords() date = runner(GITS, ["show", "-s", "--format=%ci", "HEAD"], cwd=root)[0].strip() # Use only the last line. Previous lines may contain GPG signature # information. date = date.splitlines()[-1] pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1) return pieces python-versioneer-0.29/src/git/install.py000066400000000000000000000027731445202303300205430ustar00rootroot00000000000000import os # --STRIP DURING BUILD import sys # --STRIP DURING BUILD from typing import Optional # --STRIP DURING BUILD from subprocess_helper import run_command # --STRIP DURING BUILD def do_vcs_install(versionfile_source: str, ipy: Optional[str]) -> None: """Git-specific installation logic for Versioneer. For Git, this means creating/changing .gitattributes to mark _version.py for export-subst keyword substitution. 
""" GITS = ["git"] if sys.platform == "win32": GITS = ["git.cmd", "git.exe"] files = [versionfile_source] if ipy: files.append(ipy) if "VERSIONEER_PEP518" not in globals(): try: my_path = __file__ if my_path.endswith((".pyc", ".pyo")): my_path = os.path.splitext(my_path)[0] + ".py" versioneer_file = os.path.relpath(my_path) except NameError: versioneer_file = "versioneer.py" files.append(versioneer_file) present = False try: with open(".gitattributes", "r") as fobj: for line in fobj: if line.strip().startswith(versionfile_source): if "export-subst" in line.strip().split()[1:]: present = True break except OSError: pass if not present: with open(".gitattributes", "a+") as fobj: fobj.write(f"{versionfile_source} export-subst\n") files.append(".gitattributes") run_command(GITS, ["add", "--"] + files) python-versioneer-0.29/src/git/long_get_versions.py000066400000000000000000000040251445202303300226130ustar00rootroot00000000000000import os # --STRIP DURING BUILD from typing import Any, Dict # --STRIP DURING BUILD from .long_header import get_config, get_keywords, NotThisMethod # --STRIP DURING BUILD from .from_keywords import git_versions_from_keywords # --STRIP DURING BUILD from .from_vcs import git_pieces_from_vcs # --STRIP DURING BUILD from from_parentdir import versions_from_parentdir # --STRIP DURING BUILD from render import render # --STRIP DURING BUILD def get_versions() -> Dict[str, Any]: """Get version information or return default if unable to do so.""" # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have # __file__, we can work backwards from there to the root. Some # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which # case we can only use expanded keywords. cfg = get_config() verbose = cfg.verbose try: return git_versions_from_keywords(get_keywords(), cfg.tag_prefix, verbose) except NotThisMethod: pass try: root = os.path.realpath(__file__) # versionfile_source is the relative path from the top of the source # tree (where the .git directory might live) to this file. Invert # this to find the root from __file__. for _ in cfg.versionfile_source.split('/'): root = os.path.dirname(root) except NameError: return {"version": "0+unknown", "full-revisionid": None, "dirty": None, "error": "unable to find root of source tree", "date": None} try: pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose) return render(pieces, cfg.style) except NotThisMethod: pass try: if cfg.parentdir_prefix: return versions_from_parentdir(cfg.parentdir_prefix, root, verbose) except NotThisMethod: pass return {"version": "0+unknown", "full-revisionid": None, "dirty": None, "error": "unable to compute version", "date": None} python-versioneer-0.29/src/git/long_header.py000066400000000000000000000047131445202303300213400ustar00rootroot00000000000000# This file helps to compute a version number in source trees obtained from # git-archive tarball (such as those provided by githubs download-from-tag # feature). Distribution tarballs (built by setup.py sdist) and build # directories (produced by setup.py build) will contain a much shorter file # that just contains the computed version number. # This file is released into the public domain. 
# Generated by versioneer-@VERSIONEER-VERSION@ # https://github.com/python-versioneer/python-versioneer """Git implementation of _version.py.""" import errno import os import re import subprocess import sys from typing import Any, Callable, Dict, List, Optional, Tuple import functools def get_keywords() -> Dict[str, str]: """Get the keywords needed to look up the version information.""" # these strings will be replaced by git during git-archive. # setup.py/versioneer.py will grep for the variable names, so they must # each be defined on a line of their own. _version.py will just call # get_keywords(). git_refnames = "%(DOLLAR)sFormat:%%d%(DOLLAR)s" git_full = "%(DOLLAR)sFormat:%%H%(DOLLAR)s" git_date = "%(DOLLAR)sFormat:%%ci%(DOLLAR)s" keywords = {"refnames": git_refnames, "full": git_full, "date": git_date} return keywords class VersioneerConfig: """Container for Versioneer configuration parameters.""" VCS: str style: str tag_prefix: str parentdir_prefix: str versionfile_source: str verbose: bool def get_config() -> VersioneerConfig: """Create, populate and return the VersioneerConfig() object.""" # these strings are filled in when 'setup.py versioneer' creates # _version.py cfg = VersioneerConfig() cfg.VCS = "git" cfg.style = "%(STYLE)s" cfg.tag_prefix = "%(TAG_PREFIX)s" cfg.parentdir_prefix = "%(PARENTDIR_PREFIX)s" cfg.versionfile_source = "%(VERSIONFILE_SOURCE)s" cfg.verbose = False return cfg class NotThisMethod(Exception): """Exception raised if a method is not valid for the current scenario.""" LONG_VERSION_PY: Dict[str, str] = {} HANDLERS: Dict[str, Dict[str, Callable]] = {} def register_vcs_handler(vcs: str, method: str) -> Callable: # decorator """Create decorator to mark a method as the handler of a VCS.""" def decorate(f: Callable) -> Callable: """Store f in HANDLERS[vcs][method].""" if vcs not in HANDLERS: HANDLERS[vcs] = {} HANDLERS[vcs][method] = f return f return decorate python-versioneer-0.29/src/header.py000066400000000000000000000140511445202303300175320ustar00rootroot00000000000000 # Version: @VERSIONEER-VERSION@ """The Versioneer - like a rocketeer, but for versions. @README@ """ # pylint:disable=invalid-name,import-outside-toplevel,missing-function-docstring # pylint:disable=missing-class-docstring,too-many-branches,too-many-statements # pylint:disable=raise-missing-from,too-many-lines,too-many-locals,import-error # pylint:disable=too-few-public-methods,redefined-outer-name,consider-using-with # pylint:disable=attribute-defined-outside-init,too-many-arguments import configparser import errno import json import os import re import subprocess import sys from pathlib import Path from typing import Any, Callable, cast, Dict, List, Optional, Tuple, Union from typing import NoReturn import functools have_tomllib = True if sys.version_info >= (3, 11): import tomllib else: try: import tomli as tomllib except ImportError: have_tomllib = False from .get_versions import VersioneerBadRootError # --STRIP DURING BUILD class VersioneerConfig: """Container for Versioneer configuration parameters.""" VCS: str style: str tag_prefix: str versionfile_source: str versionfile_build: Optional[str] parentdir_prefix: Optional[str] verbose: Optional[bool] def get_root() -> str: """Get the project root directory. We require that all commands are run from the project root, i.e. the directory that contains setup.py, setup.cfg, and versioneer.py . 
""" root = os.path.realpath(os.path.abspath(os.getcwd())) setup_py = os.path.join(root, "setup.py") pyproject_toml = os.path.join(root, "pyproject.toml") versioneer_py = os.path.join(root, "versioneer.py") if not ( os.path.exists(setup_py) or os.path.exists(pyproject_toml) or os.path.exists(versioneer_py) ): # allow 'python path/to/setup.py COMMAND' root = os.path.dirname(os.path.realpath(os.path.abspath(sys.argv[0]))) setup_py = os.path.join(root, "setup.py") pyproject_toml = os.path.join(root, "pyproject.toml") versioneer_py = os.path.join(root, "versioneer.py") if not ( os.path.exists(setup_py) or os.path.exists(pyproject_toml) or os.path.exists(versioneer_py) ): err = ("Versioneer was unable to run the project root directory. " "Versioneer requires setup.py to be executed from " "its immediate directory (like 'python setup.py COMMAND'), " "or in a way that lets it use sys.argv[0] to find the root " "(like 'python path/to/setup.py COMMAND').") raise VersioneerBadRootError(err) try: # Certain runtime workflows (setup.py install/develop in a setuptools # tree) execute all dependencies in a single python process, so # "versioneer" may be imported multiple times, and python's shared # module-import table will cache the first one. So we can't use # os.path.dirname(__file__), as that will find whichever # versioneer.py was first imported, even in later projects. my_path = os.path.realpath(os.path.abspath(__file__)) me_dir = os.path.normcase(os.path.splitext(my_path)[0]) vsr_dir = os.path.normcase(os.path.splitext(versioneer_py)[0]) if me_dir != vsr_dir and "VERSIONEER_PEP518" not in globals(): print("Warning: build in %s is using versioneer.py from %s" % (os.path.dirname(my_path), versioneer_py)) except NameError: pass return root def get_config_from_root(root: str) -> VersioneerConfig: """Read the project setup.cfg file to determine Versioneer config.""" # This might raise OSError (if setup.cfg is missing), or # configparser.NoSectionError (if it lacks a [versioneer] section), or # configparser.NoOptionError (if it lacks "VCS="). See the docstring at # the top of versioneer.py for instructions on writing your setup.cfg . root_pth = Path(root) pyproject_toml = root_pth / "pyproject.toml" setup_cfg = root_pth / "setup.cfg" section: Union[Dict[str, Any], configparser.SectionProxy, None] = None if pyproject_toml.exists() and have_tomllib: try: with open(pyproject_toml, 'rb') as fobj: pp = tomllib.load(fobj) section = pp['tool']['versioneer'] except (tomllib.TOMLDecodeError, KeyError) as e: print(f"Failed to load config from {pyproject_toml}: {e}") print("Try to load it from setup.cfg") if not section: parser = configparser.ConfigParser() with open(setup_cfg) as cfg_file: parser.read_file(cfg_file) parser.get("versioneer", "VCS") # raise error if missing section = parser["versioneer"] # `cast`` really shouldn't be used, but its simplest for the # common VersioneerConfig users at the moment. 
We verify against # `None` values elsewhere where it matters cfg = VersioneerConfig() cfg.VCS = section['VCS'] cfg.style = section.get("style", "") cfg.versionfile_source = cast(str, section.get("versionfile_source")) cfg.versionfile_build = section.get("versionfile_build") cfg.tag_prefix = cast(str, section.get("tag_prefix")) if cfg.tag_prefix in ("''", '""', None): cfg.tag_prefix = "" cfg.parentdir_prefix = section.get("parentdir_prefix") if isinstance(section, configparser.SectionProxy): # Make sure configparser translates to bool cfg.verbose = section.getboolean("verbose") else: cfg.verbose = section.get("verbose") return cfg class NotThisMethod(Exception): """Exception raised if a method is not valid for the current scenario.""" # these dictionaries contain VCS-specific tools LONG_VERSION_PY: Dict[str, str] = {} HANDLERS: Dict[str, Dict[str, Callable]] = {} def register_vcs_handler(vcs: str, method: str) -> Callable: # decorator """Create decorator to mark a method as the handler of a VCS.""" def decorate(f: Callable) -> Callable: """Store f in HANDLERS[vcs][method].""" HANDLERS.setdefault(vcs, {})[method] = f return f return decorate python-versioneer-0.29/src/installer.py000066400000000000000000000036431445202303300203040ustar00rootroot00000000000000#!/usr/bin/env python import sys, base64 from typing import NoReturn VERSIONEER_b64 = """ @VERSIONEER-INSTALLER@ """ newver = "@VERSIONEER-VERSION@" VERSIONEER_PEP518 = True VERSIONEER = base64.b64decode(VERSIONEER_b64.encode('ASCII')) # Stub overwritten by exec() def setup_command() -> NoReturn: ... # type: ignore # Make versioneer usable via import exec(VERSIONEER.decode(), globals()) def detect_installed_version() -> str: """Find version string in vendored versioneer Raises FileNotFoundError if missing. """ with open("versioneer.py") as fobj: for i, line in enumerate(fobj): if line.startswith("# Version: "): return line[len("# Version: "):].strip() if i > 5: break return "unknown version" def vendor() -> None: """Install versioneer into current directory""" try: oldver = detect_installed_version() except FileNotFoundError: pass else: print(f"replacing old versioneer.py ({oldver})") with open("versioneer.py", "wb") as fobj: fobj.write(VERSIONEER) print(f"versioneer.py ({newver}) installed into local tree") def main() -> NoReturn: usage = "Usage: versioneer install [--vendor|--no-vendor]" if len(sys.argv) < 2 or len(sys.argv) > 3: print(usage) sys.exit(1) command = sys.argv[1] mode = "--vendor" if len(sys.argv) == 2 else sys.argv[2] if command in ("version", "--version"): print("versioneer (installer) %s" % newver) sys.exit(0) elif command in ("help", "-help", "--help"): print(usage) sys.exit(0) elif command != "install" or mode not in ("--vendor", "--no-vendor"): print(usage) sys.exit(1) if mode == "--vendor": vendor() print("Now running 'versioneer.py setup' to install the generated files..") setup_command() if __name__ == '__main__': main() python-versioneer-0.29/src/render.py000066400000000000000000000200511445202303300175560ustar00rootroot00000000000000from typing import Any, Dict, Optional, Tuple # --STRIP DURING BUILD def plus_or_dot(pieces: Dict[str, Any]) -> str: """Return a + if we don't already have one, else return a .""" if "+" in pieces.get("closest-tag", ""): return "." return "+" def render_pep440(pieces: Dict[str, Any]) -> str: """Build up version string, with post-release "local version identifier". Our goal: TAG[+DISTANCE.gHEX[.dirty]] . 
Note that if you get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty Exceptions: 1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty] """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"] or pieces["dirty"]: rendered += plus_or_dot(pieces) rendered += "%d.g%s" % (pieces["distance"], pieces["short"]) if pieces["dirty"]: rendered += ".dirty" else: # exception #1 rendered = "0+untagged.%d.g%s" % (pieces["distance"], pieces["short"]) if pieces["dirty"]: rendered += ".dirty" return rendered def render_pep440_branch(pieces: Dict[str, Any]) -> str: """TAG[[.dev0]+DISTANCE.gHEX[.dirty]] . The ".dev0" means not master branch. Note that .dev0 sorts backwards (a feature branch will appear "older" than the master branch). Exceptions: 1: no tags. 0[.dev0]+untagged.DISTANCE.gHEX[.dirty] """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"] or pieces["dirty"]: if pieces["branch"] != "master": rendered += ".dev0" rendered += plus_or_dot(pieces) rendered += "%d.g%s" % (pieces["distance"], pieces["short"]) if pieces["dirty"]: rendered += ".dirty" else: # exception #1 rendered = "0" if pieces["branch"] != "master": rendered += ".dev0" rendered += "+untagged.%d.g%s" % (pieces["distance"], pieces["short"]) if pieces["dirty"]: rendered += ".dirty" return rendered def pep440_split_post(ver: str) -> Tuple[str, Optional[int]]: """Split pep440 version string at the post-release segment. Returns the release segments before the post-release and the post-release version number (or -1 if no post-release segment is present). """ vc = str.split(ver, ".post") return vc[0], int(vc[1] or 0) if len(vc) == 2 else None def render_pep440_pre(pieces: Dict[str, Any]) -> str: """TAG[.postN.devDISTANCE] -- No -dirty. Exceptions: 1: no tags. 0.post0.devDISTANCE """ if pieces["closest-tag"]: if pieces["distance"]: # update the post release segment tag_version, post_version = pep440_split_post(pieces["closest-tag"]) rendered = tag_version if post_version is not None: rendered += ".post%d.dev%d" % (post_version + 1, pieces["distance"]) else: rendered += ".post0.dev%d" % (pieces["distance"]) else: # no commits, use the tag as the version rendered = pieces["closest-tag"] else: # exception #1 rendered = "0.post0.dev%d" % pieces["distance"] return rendered def render_pep440_post(pieces: Dict[str, Any]) -> str: """TAG[.postDISTANCE[.dev0]+gHEX] . The ".dev0" means dirty. Note that .dev0 sorts backwards (a dirty tree will appear "older" than the corresponding clean one), but you shouldn't be releasing software with -dirty anyways. Exceptions: 1: no tags. 0.postDISTANCE[.dev0] """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"] or pieces["dirty"]: rendered += ".post%d" % pieces["distance"] if pieces["dirty"]: rendered += ".dev0" rendered += plus_or_dot(pieces) rendered += "g%s" % pieces["short"] else: # exception #1 rendered = "0.post%d" % pieces["distance"] if pieces["dirty"]: rendered += ".dev0" rendered += "+g%s" % pieces["short"] return rendered def render_pep440_post_branch(pieces: Dict[str, Any]) -> str: """TAG[.postDISTANCE[.dev0]+gHEX[.dirty]] . The ".dev0" means not master branch. Exceptions: 1: no tags. 
0.postDISTANCE[.dev0]+gHEX[.dirty] """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"] or pieces["dirty"]: rendered += ".post%d" % pieces["distance"] if pieces["branch"] != "master": rendered += ".dev0" rendered += plus_or_dot(pieces) rendered += "g%s" % pieces["short"] if pieces["dirty"]: rendered += ".dirty" else: # exception #1 rendered = "0.post%d" % pieces["distance"] if pieces["branch"] != "master": rendered += ".dev0" rendered += "+g%s" % pieces["short"] if pieces["dirty"]: rendered += ".dirty" return rendered def render_pep440_old(pieces: Dict[str, Any]) -> str: """TAG[.postDISTANCE[.dev0]] . The ".dev0" means dirty. Exceptions: 1: no tags. 0.postDISTANCE[.dev0] """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"] or pieces["dirty"]: rendered += ".post%d" % pieces["distance"] if pieces["dirty"]: rendered += ".dev0" else: # exception #1 rendered = "0.post%d" % pieces["distance"] if pieces["dirty"]: rendered += ".dev0" return rendered def render_git_describe(pieces: Dict[str, Any]) -> str: """TAG[-DISTANCE-gHEX][-dirty]. Like 'git describe --tags --dirty --always'. Exceptions: 1: no tags. HEX[-dirty] (note: no 'g' prefix) """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"]: rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) else: # exception #1 rendered = pieces["short"] if pieces["dirty"]: rendered += "-dirty" return rendered def render_git_describe_long(pieces: Dict[str, Any]) -> str: """TAG-DISTANCE-gHEX[-dirty]. Like 'git describe --tags --dirty --always -long'. The distance/hash is unconditional. Exceptions: 1: no tags. HEX[-dirty] (note: no 'g' prefix) """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) else: # exception #1 rendered = pieces["short"] if pieces["dirty"]: rendered += "-dirty" return rendered def render(pieces: Dict[str, Any], style: str) -> Dict[str, Any]: """Render the given version pieces into the requested style.""" if pieces["error"]: return {"version": "unknown", "full-revisionid": pieces.get("long"), "dirty": None, "error": pieces["error"], "date": None} if not style or style == "default": style = "pep440" # the default if style == "pep440": rendered = render_pep440(pieces) elif style == "pep440-branch": rendered = render_pep440_branch(pieces) elif style == "pep440-pre": rendered = render_pep440_pre(pieces) elif style == "pep440-post": rendered = render_pep440_post(pieces) elif style == "pep440-post-branch": rendered = render_pep440_post_branch(pieces) elif style == "pep440-old": rendered = render_pep440_old(pieces) elif style == "git-describe": rendered = render_git_describe(pieces) elif style == "git-describe-long": rendered = render_git_describe_long(pieces) else: raise ValueError("unknown style '%s'" % style) return {"version": rendered, "full-revisionid": pieces["long"], "dirty": pieces["dirty"], "error": None, "date": pieces.get("date")} python-versioneer-0.29/src/setupfunc.py000066400000000000000000000123011445202303300203120ustar00rootroot00000000000000 import configparser # --STRIP DURING BUILD import os, sys # --STRIP DURING BUILD from typing import NoReturn, Optional # --STRIP DURING BUILD from .header import get_config_from_root, get_root # --STRIP DURING BUILD from .header import LONG_VERSION_PY # --STRIP DURING BUILD from .git.install import do_vcs_install # --STRIP DURING BUILD CONFIG_ERROR = """ setup.cfg is missing the necessary Versioneer configuration. 
You need a section like: [versioneer] VCS = git style = pep440 versionfile_source = src/myproject/_version.py versionfile_build = myproject/_version.py tag_prefix = parentdir_prefix = myproject- You will also need to edit your setup.py to use the results: import versioneer setup(version=versioneer.get_version(), cmdclass=versioneer.get_cmdclass(), ...) Please read the docstring in ./versioneer.py for configuration instructions, edit setup.cfg, and re-run the installer or 'python versioneer.py setup'. """ SAMPLE_CONFIG = """ # See the docstring in versioneer.py for instructions. Note that you must # re-run 'versioneer.py setup' after changing this section, and commit the # resulting files. [versioneer] #VCS = git #style = pep440 #versionfile_source = #versionfile_build = #tag_prefix = #parentdir_prefix = """ OLD_SNIPPET = """ from ._version import get_versions __version__ = get_versions()['version'] del get_versions """ INIT_PY_SNIPPET = """ from . import {0} __version__ = {0}.get_versions()['version'] """ def do_setup() -> int: """Do main VCS-independent setup function for installing Versioneer.""" root = get_root() try: cfg = get_config_from_root(root) except (OSError, configparser.NoSectionError, configparser.NoOptionError) as e: if isinstance(e, (OSError, configparser.NoSectionError)): print("Adding sample versioneer config to setup.cfg", file=sys.stderr) with open(os.path.join(root, "setup.cfg"), "a") as f: f.write(SAMPLE_CONFIG) print(CONFIG_ERROR, file=sys.stderr) return 1 print(" creating %s" % cfg.versionfile_source) with open(cfg.versionfile_source, "w") as f: LONG = LONG_VERSION_PY[cfg.VCS] f.write(LONG % {"DOLLAR": "$", "STYLE": cfg.style, "TAG_PREFIX": cfg.tag_prefix, "PARENTDIR_PREFIX": cfg.parentdir_prefix, "VERSIONFILE_SOURCE": cfg.versionfile_source, }) ipy = os.path.join(os.path.dirname(cfg.versionfile_source), "__init__.py") maybe_ipy: Optional[str] = ipy if os.path.exists(ipy): try: with open(ipy, "r") as f: old = f.read() except OSError: old = "" module = os.path.splitext(os.path.basename(cfg.versionfile_source))[0] snippet = INIT_PY_SNIPPET.format(module) if OLD_SNIPPET in old: print(" replacing boilerplate in %s" % ipy) with open(ipy, "w") as f: f.write(old.replace(OLD_SNIPPET, snippet)) elif snippet not in old: print(" appending to %s" % ipy) with open(ipy, "a") as f: f.write(snippet) else: print(" %s unmodified" % ipy) else: print(" %s doesn't exist, ok" % ipy) maybe_ipy = None # Make VCS-specific changes. For git, this means creating/changing # .gitattributes to mark _version.py for export-subst keyword # substitution. do_vcs_install(cfg.versionfile_source, maybe_ipy) return 0 def scan_setup_py() -> int: """Validate the contents of setup.py against Versioneer's expectations.""" found = set() setters = False errors = 0 with open("setup.py", "r") as f: for line in f.readlines(): if "import versioneer" in line: found.add("import") if "versioneer.get_cmdclass()" in line: found.add("cmdclass") if "versioneer.get_version()" in line: found.add("get_version") if "versioneer.VCS" in line: setters = True if "versioneer.versionfile_source" in line: setters = True if len(found) != 3: print("") print("Your setup.py appears to be missing some important items") print("(but I might be wrong). 
Please make sure it has something") print("roughly like the following:") print("") print(" import versioneer") print(" setup( version=versioneer.get_version(),") print(" cmdclass=versioneer.get_cmdclass(), ...)") print("") errors += 1 if setters: print("You should remove lines like 'versioneer.VCS = ' and") print("'versioneer.versionfile_source = ' . This configuration") print("now lives in setup.cfg, and should be removed from setup.py") print("") errors += 1 return errors def setup_command() -> NoReturn: """Set up Versioneer and exit with appropriate error code.""" errors = do_setup() errors += scan_setup_py() sys.exit(1 if errors else 0) if __name__ == "__main__": cmd = sys.argv[1] if cmd == "setup": setup_command() python-versioneer-0.29/src/subprocess_helper.py000066400000000000000000000035041445202303300220320ustar00rootroot00000000000000import sys, subprocess, errno # --STRIP DURING BUILD from typing import Any, Dict, List, Optional, Tuple # --STRIP DURING BUILD def run_command( commands: List[str], args: List[str], cwd: Optional[str] = None, verbose: bool = False, hide_stderr: bool = False, env: Optional[Dict[str, str]] = None, ) -> Tuple[Optional[str], Optional[int]]: """Call the given command(s).""" assert isinstance(commands, list) process = None popen_kwargs: Dict[str, Any] = {} if sys.platform == "win32": # This hides the console window if pythonw.exe is used startupinfo = subprocess.STARTUPINFO() startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW popen_kwargs["startupinfo"] = startupinfo for command in commands: try: dispcmd = str([command] + args) # remember shell=False, so use git.cmd on windows, not just git process = subprocess.Popen([command] + args, cwd=cwd, env=env, stdout=subprocess.PIPE, stderr=(subprocess.PIPE if hide_stderr else None), **popen_kwargs) break except OSError as e: if e.errno == errno.ENOENT: continue if verbose: print("unable to run %s" % dispcmd) print(e) return None, None else: if verbose: print("unable to find command, tried %s" % (commands,)) return None, None stdout = process.communicate()[0].strip().decode() if process.returncode != 0: if verbose: print("unable to run %s (error)" % dispcmd) print("stdout was %s" % stdout) return None, process.returncode return stdout, process.returncode python-versioneer-0.29/test/000077500000000000000000000000001445202303300161175ustar00rootroot00000000000000python-versioneer-0.29/test/demoapp-pyproject/000077500000000000000000000000001445202303300215615ustar00rootroot00000000000000python-versioneer-0.29/test/demoapp-pyproject/README000066400000000000000000000000101445202303300224300ustar00rootroot00000000000000read me python-versioneer-0.29/test/demoapp-pyproject/bin/000077500000000000000000000000001445202303300223315ustar00rootroot00000000000000python-versioneer-0.29/test/demoapp-pyproject/bin/rundemo000066400000000000000000000003671445202303300237330ustar00rootroot00000000000000#!/usr/bin/env python import demo from demo import _version print("__version__:%s" % demo.__version__) print("_version:%s" % str(_version)) versions = _version.get_versions() for k in sorted(versions.keys()): print("%s:%s" % (k,versions[k])) python-versioneer-0.29/test/demoapp-pyproject/pyproject.toml000066400000000000000000000007011445202303300244730ustar00rootroot00000000000000[build-system] requires = ["setuptools>=61.0", "versioneer[toml] @ @REPOROOT@"] build-backend = "setuptools.build_meta" [project] name = "demo" authors = [ { name="Example Author", email="author@example.com" }, ] description = "Demo" requires-python = 
">=3.7" dynamic = ["version"] [tool.versioneer] VCS = "@VCS@" versionfile_source = "src/demo/_version.py" versionfile_build = "demo/_version.py" tag_prefix = "demo-" parentdir_prefix = "demo-" python-versioneer-0.29/test/demoapp-pyproject/setup.py000066400000000000000000000003531445202303300232740ustar00rootroot00000000000000from setuptools import setup import versioneer setup( version=versioneer.get_version(), cmdclass=versioneer.get_cmdclass(), zip_safe=True, packages=["demo"], package_dir={"": "src"}, scripts=["bin/rundemo"], ) python-versioneer-0.29/test/demoapp-pyproject/src/000077500000000000000000000000001445202303300223505ustar00rootroot00000000000000python-versioneer-0.29/test/demoapp-pyproject/src/demo/000077500000000000000000000000001445202303300232745ustar00rootroot00000000000000python-versioneer-0.29/test/demoapp-pyproject/src/demo/__init__.py000066400000000000000000000000011445202303300253740ustar00rootroot00000000000000 python-versioneer-0.29/test/demoapp-script-only/000077500000000000000000000000001445202303300220255ustar00rootroot00000000000000python-versioneer-0.29/test/demoapp-script-only/MANIFEST.in000066400000000000000000000000571445202303300235650ustar00rootroot00000000000000include src/dummy include src/rundemo-template python-versioneer-0.29/test/demoapp-script-only/README000066400000000000000000000000101445202303300226740ustar00rootroot00000000000000read me python-versioneer-0.29/test/demoapp-script-only/fake.py000066400000000000000000000000011445202303300232740ustar00rootroot00000000000000 python-versioneer-0.29/test/demoapp-script-only/setup.cfg000066400000000000000000000002031445202303300236410ustar00rootroot00000000000000[versioneer] VCS = @VCS@ versionfile_source = src/demo/_version.py versionfile_build = tag_prefix = demo- parentdir_prefix = demo- python-versioneer-0.29/test/demoapp-script-only/setup.py000066400000000000000000000025331445202303300235420ustar00rootroot00000000000000 import os, tempfile from setuptools import setup from distutils.command.build_scripts import build_scripts import versioneer commands = versioneer.get_cmdclass().copy() class my_build_scripts(build_scripts): def run(self): versions = versioneer.get_versions() tempdir = tempfile.mkdtemp() generated = os.path.join(tempdir, "rundemo") with open(generated, "wb") as f: for line in open("src/rundemo-template", "rb"): if line.strip().decode("ascii") == "versions = None": f.write(('versions = %r\n' % (versions,)).encode("ascii")) else: f.write(line) self.scripts = [generated] rc = build_scripts.run(self) os.unlink(generated) os.rmdir(tempdir) return rc commands["build_scripts"] = my_build_scripts setup(name="demo", version=versioneer.get_version(), description="Demo", url="url", author="author", author_email="email", zip_safe=True, scripts=["src/dummy"], # this will be replaced by my_build_scripts # without py_modules= or packages=, distutils thinks this module is not # "pure", and will put a platform indicator in the .whl name even # though we call bdist_wheel with --universal. 
py_modules=["fake"], cmdclass=commands, ) python-versioneer-0.29/test/demoapp-script-only/src/000077500000000000000000000000001445202303300226145ustar00rootroot00000000000000python-versioneer-0.29/test/demoapp-script-only/src/demo/000077500000000000000000000000001445202303300235405ustar00rootroot00000000000000python-versioneer-0.29/test/demoapp-script-only/src/demo/placeholder000066400000000000000000000000001445202303300257330ustar00rootroot00000000000000python-versioneer-0.29/test/demoapp-script-only/src/dummy000066400000000000000000000005321445202303300236720ustar00rootroot00000000000000This is a fake script, used to ensure that setup.py's scripts= list is non-empty. The real script is generated (by modifying a template) during the "setup.py build_scripts" command (run by "setup.py build"). If scripts= was empty, then build_scripts wouldn't get run (as distutils would mistakenly believe that this package produced no scripts). python-versioneer-0.29/test/demoapp-script-only/src/rundemo-template000066400000000000000000000002351445202303300260210ustar00rootroot00000000000000#!/usr/bin/env python versions = None print("__version__:%s" % versions['version']) for k in sorted(versions.keys()): print("%s:%s" % (k,versions[k])) python-versioneer-0.29/test/demoapp/000077500000000000000000000000001445202303300175445ustar00rootroot00000000000000python-versioneer-0.29/test/demoapp/MANIFEST.in000066400000000000000000000000001445202303300212700ustar00rootroot00000000000000python-versioneer-0.29/test/demoapp/README000066400000000000000000000000101445202303300204130ustar00rootroot00000000000000read me python-versioneer-0.29/test/demoapp/bin/000077500000000000000000000000001445202303300203145ustar00rootroot00000000000000python-versioneer-0.29/test/demoapp/bin/rundemo000066400000000000000000000003671445202303300217160ustar00rootroot00000000000000#!/usr/bin/env python import demo from demo import _version print("__version__:%s" % demo.__version__) print("_version:%s" % str(_version)) versions = _version.get_versions() for k in sorted(versions.keys()): print("%s:%s" % (k,versions[k])) python-versioneer-0.29/test/demoapp/setup.cfg000066400000000000000000000002251445202303300213640ustar00rootroot00000000000000 [versioneer] VCS = @VCS@ versionfile_source = src/demo/_version.py versionfile_build = demo/_version.py tag_prefix = demo- parentdir_prefix = demo- python-versioneer-0.29/test/demoapp/setup.py000066400000000000000000000006031445202303300212550ustar00rootroot00000000000000 from setuptools import setup import versioneer commands = versioneer.get_cmdclass().copy() setup(name="demo", version=versioneer.get_version(), description="Demo", url="url", author="author", author_email="email", zip_safe=True, packages=["demo"], package_dir={"": "src"}, scripts=["bin/rundemo"], cmdclass=commands, ) python-versioneer-0.29/test/demoapp/src/000077500000000000000000000000001445202303300203335ustar00rootroot00000000000000python-versioneer-0.29/test/demoapp/src/demo/000077500000000000000000000000001445202303300212575ustar00rootroot00000000000000python-versioneer-0.29/test/demoapp/src/demo/__init__.py000066400000000000000000000000011445202303300233570ustar00rootroot00000000000000 
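A minimal runtime sketch (not one of the fixture files above; the version values in the comments are hypothetical) of how the demoapp fixture consumes the generated metadata once 'versioneer install' has created src/demo/_version.py and appended the __version__ snippet to __init__.py:

import demo
from demo import _version

versions = _version.get_versions()
# get_versions() returns the dict assembled by render() above, e.g. (hypothetical values):
#   {"version": "1.0+2.gabc1234", "full-revisionid": "<40-char sha>",
#    "dirty": False, "error": None, "date": "<ISO-8601 commit date>"}
print(demo.__version__)              # same string as versions["version"]
for key in sorted(versions):
    print("%s:%s" % (key, versions[key]))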
python-versioneer-0.29/test/demoapp2-setuptools-subproject/000077500000000000000000000000001445202303300242235ustar00rootroot00000000000000python-versioneer-0.29/test/demoapp2-setuptools-subproject/subproject/000077500000000000000000000000001445202303300264035ustar00rootroot00000000000000python-versioneer-0.29/test/demoapp2-setuptools-subproject/subproject/MANIFEST.in000066400000000000000000000000001445202303300301270ustar00rootroot00000000000000python-versioneer-0.29/test/demoapp2-setuptools-subproject/subproject/README000066400000000000000000000000101445202303300272520ustar00rootroot00000000000000read me python-versioneer-0.29/test/demoapp2-setuptools-subproject/subproject/setup.cfg000066400000000000000000000002331445202303300302220ustar00rootroot00000000000000 [versioneer] VCS = git versionfile_source = src/demo/_version.py versionfile_build = demo/_version.py tag_prefix = demoapp2- parentdir_prefix = demoapp2- python-versioneer-0.29/test/demoapp2-setuptools-subproject/subproject/setup.py000066400000000000000000000007571445202303300301260ustar00rootroot00000000000000 from setuptools import setup import versioneer commands = versioneer.get_cmdclass().copy() setup(name="demoapp2", version=versioneer.get_version(), description="Demo", url="url", author="author", author_email="email", zip_safe=True, packages=["demo"], package_dir={"": "src"}, entry_points={ 'console_scripts': [ 'rundemo = demo.main:run' ], }, install_requires=["demolib==1.0"], cmdclass=commands, ) python-versioneer-0.29/test/demoapp2-setuptools-subproject/subproject/src/000077500000000000000000000000001445202303300271725ustar00rootroot00000000000000python-versioneer-0.29/test/demoapp2-setuptools-subproject/subproject/src/demo/000077500000000000000000000000001445202303300301165ustar00rootroot00000000000000python-versioneer-0.29/test/demoapp2-setuptools-subproject/subproject/src/demo/__init__.py000066400000000000000000000000011445202303300322160ustar00rootroot00000000000000 python-versioneer-0.29/test/demoapp2-setuptools-subproject/subproject/src/demo/main.py000066400000000000000000000005711445202303300314170ustar00rootroot00000000000000#!/usr/bin/env python import demo from demo import _version from demolib import __version__ as libversion def run(*args, **kwargs): print("__version__:%s" % demo.__version__) print("_version:%s" % str(_version)) versions = _version.get_versions() for k in sorted(versions.keys()): print("%s:%s" % (k,versions[k])) print("demolib:%s" % libversion) python-versioneer-0.29/test/demoapp2-setuptools/000077500000000000000000000000001445202303300220455ustar00rootroot00000000000000python-versioneer-0.29/test/demoapp2-setuptools/MANIFEST.in000066400000000000000000000000001445202303300235710ustar00rootroot00000000000000python-versioneer-0.29/test/demoapp2-setuptools/README000066400000000000000000000000101445202303300227140ustar00rootroot00000000000000read me python-versioneer-0.29/test/demoapp2-setuptools/setup.cfg000066400000000000000000000002331445202303300236640ustar00rootroot00000000000000 [versioneer] VCS = git versionfile_source = src/demo/_version.py versionfile_build = demo/_version.py tag_prefix = demoapp2- parentdir_prefix = demoapp2- python-versioneer-0.29/test/demoapp2-setuptools/setup.py000066400000000000000000000011221445202303300235530ustar00rootroot00000000000000 from setuptools import setup import versioneer commands = versioneer.get_cmdclass().copy() # Updating our updated commands should be safe commands = versioneer.get_cmdclass(commands).copy() setup(name="demoapp2", 
version=versioneer.get_version(), description="Demo", url="url", author="author", author_email="email", zip_safe=True, packages=["demo"], package_dir={"": "src"}, entry_points={ 'console_scripts': [ 'rundemo = demo.main:run' ], }, install_requires=["demolib==1.0"], cmdclass=commands, ) python-versioneer-0.29/test/demoapp2-setuptools/src/000077500000000000000000000000001445202303300226345ustar00rootroot00000000000000python-versioneer-0.29/test/demoapp2-setuptools/src/demo/000077500000000000000000000000001445202303300235605ustar00rootroot00000000000000python-versioneer-0.29/test/demoapp2-setuptools/src/demo/__init__.py000066400000000000000000000000011445202303300256600ustar00rootroot00000000000000 python-versioneer-0.29/test/demoapp2-setuptools/src/demo/main.py000066400000000000000000000005711445202303300250610ustar00rootroot00000000000000#!/usr/bin/env python import demo from demo import _version from demolib import __version__ as libversion def run(*args, **kwargs): print("__version__:%s" % demo.__version__) print("_version:%s" % str(_version)) versions = _version.get_versions() for k in sorted(versions.keys()): print("%s:%s" % (k,versions[k])) print("demolib:%s" % libversion) python-versioneer-0.29/test/demoappext-setuptools/000077500000000000000000000000001445202303300225045ustar00rootroot00000000000000python-versioneer-0.29/test/demoappext-setuptools/MANIFEST.in000066400000000000000000000000001445202303300242300ustar00rootroot00000000000000python-versioneer-0.29/test/demoappext-setuptools/README000066400000000000000000000000101445202303300233530ustar00rootroot00000000000000read me python-versioneer-0.29/test/demoappext-setuptools/demo/000077500000000000000000000000001445202303300234305ustar00rootroot00000000000000python-versioneer-0.29/test/demoappext-setuptools/demo/__init__.py000066400000000000000000000000011445202303300255300ustar00rootroot00000000000000 python-versioneer-0.29/test/demoappext-setuptools/demo/ext.c000066400000000000000000003325301445202303300244020ustar00rootroot00000000000000/* Generated by Cython 0.29.32 */ #ifndef PY_SSIZE_T_CLEAN #define PY_SSIZE_T_CLEAN #endif /* PY_SSIZE_T_CLEAN */ #include "Python.h" #ifndef Py_PYTHON_H #error Python headers needed to compile C extensions, please install development version of Python. #elif PY_VERSION_HEX < 0x02060000 || (0x03000000 <= PY_VERSION_HEX && PY_VERSION_HEX < 0x03030000) #error Cython requires Python 2.6+ or Python 3.3+. 
#else #define CYTHON_ABI "0_29_32" #define CYTHON_HEX_VERSION 0x001D20F0 #define CYTHON_FUTURE_DIVISION 1 #include #ifndef offsetof #define offsetof(type, member) ( (size_t) & ((type*)0) -> member ) #endif #if !defined(WIN32) && !defined(MS_WINDOWS) #ifndef __stdcall #define __stdcall #endif #ifndef __cdecl #define __cdecl #endif #ifndef __fastcall #define __fastcall #endif #endif #ifndef DL_IMPORT #define DL_IMPORT(t) t #endif #ifndef DL_EXPORT #define DL_EXPORT(t) t #endif #define __PYX_COMMA , #ifndef HAVE_LONG_LONG #if PY_VERSION_HEX >= 0x02070000 #define HAVE_LONG_LONG #endif #endif #ifndef PY_LONG_LONG #define PY_LONG_LONG LONG_LONG #endif #ifndef Py_HUGE_VAL #define Py_HUGE_VAL HUGE_VAL #endif #ifdef PYPY_VERSION #define CYTHON_COMPILING_IN_PYPY 1 #define CYTHON_COMPILING_IN_PYSTON 0 #define CYTHON_COMPILING_IN_CPYTHON 0 #define CYTHON_COMPILING_IN_NOGIL 0 #undef CYTHON_USE_TYPE_SLOTS #define CYTHON_USE_TYPE_SLOTS 0 #undef CYTHON_USE_PYTYPE_LOOKUP #define CYTHON_USE_PYTYPE_LOOKUP 0 #if PY_VERSION_HEX < 0x03050000 #undef CYTHON_USE_ASYNC_SLOTS #define CYTHON_USE_ASYNC_SLOTS 0 #elif !defined(CYTHON_USE_ASYNC_SLOTS) #define CYTHON_USE_ASYNC_SLOTS 1 #endif #undef CYTHON_USE_PYLIST_INTERNALS #define CYTHON_USE_PYLIST_INTERNALS 0 #undef CYTHON_USE_UNICODE_INTERNALS #define CYTHON_USE_UNICODE_INTERNALS 0 #undef CYTHON_USE_UNICODE_WRITER #define CYTHON_USE_UNICODE_WRITER 0 #undef CYTHON_USE_PYLONG_INTERNALS #define CYTHON_USE_PYLONG_INTERNALS 0 #undef CYTHON_AVOID_BORROWED_REFS #define CYTHON_AVOID_BORROWED_REFS 1 #undef CYTHON_ASSUME_SAFE_MACROS #define CYTHON_ASSUME_SAFE_MACROS 0 #undef CYTHON_UNPACK_METHODS #define CYTHON_UNPACK_METHODS 0 #undef CYTHON_FAST_THREAD_STATE #define CYTHON_FAST_THREAD_STATE 0 #undef CYTHON_FAST_PYCALL #define CYTHON_FAST_PYCALL 0 #undef CYTHON_PEP489_MULTI_PHASE_INIT #define CYTHON_PEP489_MULTI_PHASE_INIT 0 #undef CYTHON_USE_TP_FINALIZE #define CYTHON_USE_TP_FINALIZE 0 #undef CYTHON_USE_DICT_VERSIONS #define CYTHON_USE_DICT_VERSIONS 0 #undef CYTHON_USE_EXC_INFO_STACK #define CYTHON_USE_EXC_INFO_STACK 0 #ifndef CYTHON_UPDATE_DESCRIPTOR_DOC #define CYTHON_UPDATE_DESCRIPTOR_DOC (PYPY_VERSION_HEX >= 0x07030900) #endif #elif defined(PYSTON_VERSION) #define CYTHON_COMPILING_IN_PYPY 0 #define CYTHON_COMPILING_IN_PYSTON 1 #define CYTHON_COMPILING_IN_CPYTHON 0 #define CYTHON_COMPILING_IN_NOGIL 0 #ifndef CYTHON_USE_TYPE_SLOTS #define CYTHON_USE_TYPE_SLOTS 1 #endif #undef CYTHON_USE_PYTYPE_LOOKUP #define CYTHON_USE_PYTYPE_LOOKUP 0 #undef CYTHON_USE_ASYNC_SLOTS #define CYTHON_USE_ASYNC_SLOTS 0 #undef CYTHON_USE_PYLIST_INTERNALS #define CYTHON_USE_PYLIST_INTERNALS 0 #ifndef CYTHON_USE_UNICODE_INTERNALS #define CYTHON_USE_UNICODE_INTERNALS 1 #endif #undef CYTHON_USE_UNICODE_WRITER #define CYTHON_USE_UNICODE_WRITER 0 #undef CYTHON_USE_PYLONG_INTERNALS #define CYTHON_USE_PYLONG_INTERNALS 0 #ifndef CYTHON_AVOID_BORROWED_REFS #define CYTHON_AVOID_BORROWED_REFS 0 #endif #ifndef CYTHON_ASSUME_SAFE_MACROS #define CYTHON_ASSUME_SAFE_MACROS 1 #endif #ifndef CYTHON_UNPACK_METHODS #define CYTHON_UNPACK_METHODS 1 #endif #undef CYTHON_FAST_THREAD_STATE #define CYTHON_FAST_THREAD_STATE 0 #undef CYTHON_FAST_PYCALL #define CYTHON_FAST_PYCALL 0 #undef CYTHON_PEP489_MULTI_PHASE_INIT #define CYTHON_PEP489_MULTI_PHASE_INIT 0 #undef CYTHON_USE_TP_FINALIZE #define CYTHON_USE_TP_FINALIZE 0 #undef CYTHON_USE_DICT_VERSIONS #define CYTHON_USE_DICT_VERSIONS 0 #undef CYTHON_USE_EXC_INFO_STACK #define CYTHON_USE_EXC_INFO_STACK 0 #ifndef CYTHON_UPDATE_DESCRIPTOR_DOC #define CYTHON_UPDATE_DESCRIPTOR_DOC 
0 #endif #elif defined(PY_NOGIL) #define CYTHON_COMPILING_IN_PYPY 0 #define CYTHON_COMPILING_IN_PYSTON 0 #define CYTHON_COMPILING_IN_CPYTHON 0 #define CYTHON_COMPILING_IN_NOGIL 1 #ifndef CYTHON_USE_TYPE_SLOTS #define CYTHON_USE_TYPE_SLOTS 1 #endif #undef CYTHON_USE_PYTYPE_LOOKUP #define CYTHON_USE_PYTYPE_LOOKUP 0 #ifndef CYTHON_USE_ASYNC_SLOTS #define CYTHON_USE_ASYNC_SLOTS 1 #endif #undef CYTHON_USE_PYLIST_INTERNALS #define CYTHON_USE_PYLIST_INTERNALS 0 #ifndef CYTHON_USE_UNICODE_INTERNALS #define CYTHON_USE_UNICODE_INTERNALS 1 #endif #undef CYTHON_USE_UNICODE_WRITER #define CYTHON_USE_UNICODE_WRITER 0 #undef CYTHON_USE_PYLONG_INTERNALS #define CYTHON_USE_PYLONG_INTERNALS 0 #ifndef CYTHON_AVOID_BORROWED_REFS #define CYTHON_AVOID_BORROWED_REFS 0 #endif #ifndef CYTHON_ASSUME_SAFE_MACROS #define CYTHON_ASSUME_SAFE_MACROS 1 #endif #ifndef CYTHON_UNPACK_METHODS #define CYTHON_UNPACK_METHODS 1 #endif #undef CYTHON_FAST_THREAD_STATE #define CYTHON_FAST_THREAD_STATE 0 #undef CYTHON_FAST_PYCALL #define CYTHON_FAST_PYCALL 0 #ifndef CYTHON_PEP489_MULTI_PHASE_INIT #define CYTHON_PEP489_MULTI_PHASE_INIT 1 #endif #ifndef CYTHON_USE_TP_FINALIZE #define CYTHON_USE_TP_FINALIZE 1 #endif #undef CYTHON_USE_DICT_VERSIONS #define CYTHON_USE_DICT_VERSIONS 0 #undef CYTHON_USE_EXC_INFO_STACK #define CYTHON_USE_EXC_INFO_STACK 0 #else #define CYTHON_COMPILING_IN_PYPY 0 #define CYTHON_COMPILING_IN_PYSTON 0 #define CYTHON_COMPILING_IN_CPYTHON 1 #define CYTHON_COMPILING_IN_NOGIL 0 #ifndef CYTHON_USE_TYPE_SLOTS #define CYTHON_USE_TYPE_SLOTS 1 #endif #if PY_VERSION_HEX < 0x02070000 #undef CYTHON_USE_PYTYPE_LOOKUP #define CYTHON_USE_PYTYPE_LOOKUP 0 #elif !defined(CYTHON_USE_PYTYPE_LOOKUP) #define CYTHON_USE_PYTYPE_LOOKUP 1 #endif #if PY_MAJOR_VERSION < 3 #undef CYTHON_USE_ASYNC_SLOTS #define CYTHON_USE_ASYNC_SLOTS 0 #elif !defined(CYTHON_USE_ASYNC_SLOTS) #define CYTHON_USE_ASYNC_SLOTS 1 #endif #if PY_VERSION_HEX < 0x02070000 #undef CYTHON_USE_PYLONG_INTERNALS #define CYTHON_USE_PYLONG_INTERNALS 0 #elif !defined(CYTHON_USE_PYLONG_INTERNALS) #define CYTHON_USE_PYLONG_INTERNALS 1 #endif #ifndef CYTHON_USE_PYLIST_INTERNALS #define CYTHON_USE_PYLIST_INTERNALS 1 #endif #ifndef CYTHON_USE_UNICODE_INTERNALS #define CYTHON_USE_UNICODE_INTERNALS 1 #endif #if PY_VERSION_HEX < 0x030300F0 || PY_VERSION_HEX >= 0x030B00A2 #undef CYTHON_USE_UNICODE_WRITER #define CYTHON_USE_UNICODE_WRITER 0 #elif !defined(CYTHON_USE_UNICODE_WRITER) #define CYTHON_USE_UNICODE_WRITER 1 #endif #ifndef CYTHON_AVOID_BORROWED_REFS #define CYTHON_AVOID_BORROWED_REFS 0 #endif #ifndef CYTHON_ASSUME_SAFE_MACROS #define CYTHON_ASSUME_SAFE_MACROS 1 #endif #ifndef CYTHON_UNPACK_METHODS #define CYTHON_UNPACK_METHODS 1 #endif #if PY_VERSION_HEX >= 0x030B00A4 #undef CYTHON_FAST_THREAD_STATE #define CYTHON_FAST_THREAD_STATE 0 #elif !defined(CYTHON_FAST_THREAD_STATE) #define CYTHON_FAST_THREAD_STATE 1 #endif #ifndef CYTHON_FAST_PYCALL #define CYTHON_FAST_PYCALL (PY_VERSION_HEX < 0x030A0000) #endif #ifndef CYTHON_PEP489_MULTI_PHASE_INIT #define CYTHON_PEP489_MULTI_PHASE_INIT (PY_VERSION_HEX >= 0x03050000) #endif #ifndef CYTHON_USE_TP_FINALIZE #define CYTHON_USE_TP_FINALIZE (PY_VERSION_HEX >= 0x030400a1) #endif #ifndef CYTHON_USE_DICT_VERSIONS #define CYTHON_USE_DICT_VERSIONS (PY_VERSION_HEX >= 0x030600B1) #endif #if PY_VERSION_HEX >= 0x030B00A4 #undef CYTHON_USE_EXC_INFO_STACK #define CYTHON_USE_EXC_INFO_STACK 0 #elif !defined(CYTHON_USE_EXC_INFO_STACK) #define CYTHON_USE_EXC_INFO_STACK (PY_VERSION_HEX >= 0x030700A3) #endif #ifndef CYTHON_UPDATE_DESCRIPTOR_DOC #define 
CYTHON_UPDATE_DESCRIPTOR_DOC 1 #endif #endif #if !defined(CYTHON_FAST_PYCCALL) #define CYTHON_FAST_PYCCALL (CYTHON_FAST_PYCALL && PY_VERSION_HEX >= 0x030600B1) #endif #if CYTHON_USE_PYLONG_INTERNALS #if PY_MAJOR_VERSION < 3 #include "longintrepr.h" #endif #undef SHIFT #undef BASE #undef MASK #ifdef SIZEOF_VOID_P enum { __pyx_check_sizeof_voidp = 1 / (int)(SIZEOF_VOID_P == sizeof(void*)) }; #endif #endif #ifndef __has_attribute #define __has_attribute(x) 0 #endif #ifndef __has_cpp_attribute #define __has_cpp_attribute(x) 0 #endif #ifndef CYTHON_RESTRICT #if defined(__GNUC__) #define CYTHON_RESTRICT __restrict__ #elif defined(_MSC_VER) && _MSC_VER >= 1400 #define CYTHON_RESTRICT __restrict #elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L #define CYTHON_RESTRICT restrict #else #define CYTHON_RESTRICT #endif #endif #ifndef CYTHON_UNUSED # if defined(__GNUC__) # if !(defined(__cplusplus)) || (__GNUC__ > 3 || (__GNUC__ == 3 && __GNUC_MINOR__ >= 4)) # define CYTHON_UNUSED __attribute__ ((__unused__)) # else # define CYTHON_UNUSED # endif # elif defined(__ICC) || (defined(__INTEL_COMPILER) && !defined(_MSC_VER)) # define CYTHON_UNUSED __attribute__ ((__unused__)) # else # define CYTHON_UNUSED # endif #endif #ifndef CYTHON_MAYBE_UNUSED_VAR # if defined(__cplusplus) template void CYTHON_MAYBE_UNUSED_VAR( const T& ) { } # else # define CYTHON_MAYBE_UNUSED_VAR(x) (void)(x) # endif #endif #ifndef CYTHON_NCP_UNUSED # if CYTHON_COMPILING_IN_CPYTHON # define CYTHON_NCP_UNUSED # else # define CYTHON_NCP_UNUSED CYTHON_UNUSED # endif #endif #define __Pyx_void_to_None(void_result) ((void)(void_result), Py_INCREF(Py_None), Py_None) #ifdef _MSC_VER #ifndef _MSC_STDINT_H_ #if _MSC_VER < 1300 typedef unsigned char uint8_t; typedef unsigned int uint32_t; #else typedef unsigned __int8 uint8_t; typedef unsigned __int32 uint32_t; #endif #endif #else #include #endif #ifndef CYTHON_FALLTHROUGH #if defined(__cplusplus) && __cplusplus >= 201103L #if __has_cpp_attribute(fallthrough) #define CYTHON_FALLTHROUGH [[fallthrough]] #elif __has_cpp_attribute(clang::fallthrough) #define CYTHON_FALLTHROUGH [[clang::fallthrough]] #elif __has_cpp_attribute(gnu::fallthrough) #define CYTHON_FALLTHROUGH [[gnu::fallthrough]] #endif #endif #ifndef CYTHON_FALLTHROUGH #if __has_attribute(fallthrough) #define CYTHON_FALLTHROUGH __attribute__((fallthrough)) #else #define CYTHON_FALLTHROUGH #endif #endif #if defined(__clang__ ) && defined(__apple_build_version__) #if __apple_build_version__ < 7000000 #undef CYTHON_FALLTHROUGH #define CYTHON_FALLTHROUGH #endif #endif #endif #ifndef CYTHON_INLINE #if defined(__clang__) #define CYTHON_INLINE __inline__ __attribute__ ((__unused__)) #elif defined(__GNUC__) #define CYTHON_INLINE __inline__ #elif defined(_MSC_VER) #define CYTHON_INLINE __inline #elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L #define CYTHON_INLINE inline #else #define CYTHON_INLINE #endif #endif #if CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX < 0x02070600 && !defined(Py_OptimizeFlag) #define Py_OptimizeFlag 0 #endif #define __PYX_BUILD_PY_SSIZE_T "n" #define CYTHON_FORMAT_SSIZE_T "z" #if PY_MAJOR_VERSION < 3 #define __Pyx_BUILTIN_MODULE_NAME "__builtin__" #define __Pyx_PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\ PyCode_New(a+k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos) #define __Pyx_DefaultClassType PyClass_Type #else #define __Pyx_BUILTIN_MODULE_NAME "builtins" #define __Pyx_DefaultClassType PyType_Type #if PY_VERSION_HEX >= 0x030B00A1 static CYTHON_INLINE 
PyCodeObject* __Pyx_PyCode_New(int a, int k, int l, int s, int f, PyObject *code, PyObject *c, PyObject* n, PyObject *v, PyObject *fv, PyObject *cell, PyObject* fn, PyObject *name, int fline, PyObject *lnos) { PyObject *kwds=NULL, *argcount=NULL, *posonlyargcount=NULL, *kwonlyargcount=NULL; PyObject *nlocals=NULL, *stacksize=NULL, *flags=NULL, *replace=NULL, *call_result=NULL, *empty=NULL; const char *fn_cstr=NULL; const char *name_cstr=NULL; PyCodeObject* co=NULL; PyObject *type, *value, *traceback; PyErr_Fetch(&type, &value, &traceback); if (!(kwds=PyDict_New())) goto end; if (!(argcount=PyLong_FromLong(a))) goto end; if (PyDict_SetItemString(kwds, "co_argcount", argcount) != 0) goto end; if (!(posonlyargcount=PyLong_FromLong(0))) goto end; if (PyDict_SetItemString(kwds, "co_posonlyargcount", posonlyargcount) != 0) goto end; if (!(kwonlyargcount=PyLong_FromLong(k))) goto end; if (PyDict_SetItemString(kwds, "co_kwonlyargcount", kwonlyargcount) != 0) goto end; if (!(nlocals=PyLong_FromLong(l))) goto end; if (PyDict_SetItemString(kwds, "co_nlocals", nlocals) != 0) goto end; if (!(stacksize=PyLong_FromLong(s))) goto end; if (PyDict_SetItemString(kwds, "co_stacksize", stacksize) != 0) goto end; if (!(flags=PyLong_FromLong(f))) goto end; if (PyDict_SetItemString(kwds, "co_flags", flags) != 0) goto end; if (PyDict_SetItemString(kwds, "co_code", code) != 0) goto end; if (PyDict_SetItemString(kwds, "co_consts", c) != 0) goto end; if (PyDict_SetItemString(kwds, "co_names", n) != 0) goto end; if (PyDict_SetItemString(kwds, "co_varnames", v) != 0) goto end; if (PyDict_SetItemString(kwds, "co_freevars", fv) != 0) goto end; if (PyDict_SetItemString(kwds, "co_cellvars", cell) != 0) goto end; if (PyDict_SetItemString(kwds, "co_linetable", lnos) != 0) goto end; if (!(fn_cstr=PyUnicode_AsUTF8AndSize(fn, NULL))) goto end; if (!(name_cstr=PyUnicode_AsUTF8AndSize(name, NULL))) goto end; if (!(co = PyCode_NewEmpty(fn_cstr, name_cstr, fline))) goto end; if (!(replace = PyObject_GetAttrString((PyObject*)co, "replace"))) goto cleanup_code_too; if (!(empty = PyTuple_New(0))) goto cleanup_code_too; // unfortunately __pyx_empty_tuple isn't available here if (!(call_result = PyObject_Call(replace, empty, kwds))) goto cleanup_code_too; Py_XDECREF((PyObject*)co); co = (PyCodeObject*)call_result; call_result = NULL; if (0) { cleanup_code_too: Py_XDECREF((PyObject*)co); co = NULL; } end: Py_XDECREF(kwds); Py_XDECREF(argcount); Py_XDECREF(posonlyargcount); Py_XDECREF(kwonlyargcount); Py_XDECREF(nlocals); Py_XDECREF(stacksize); Py_XDECREF(replace); Py_XDECREF(call_result); Py_XDECREF(empty); if (type) { PyErr_Restore(type, value, traceback); } return co; } #else #define __Pyx_PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\ PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos) #endif #define __Pyx_DefaultClassType PyType_Type #endif #ifndef Py_TPFLAGS_CHECKTYPES #define Py_TPFLAGS_CHECKTYPES 0 #endif #ifndef Py_TPFLAGS_HAVE_INDEX #define Py_TPFLAGS_HAVE_INDEX 0 #endif #ifndef Py_TPFLAGS_HAVE_NEWBUFFER #define Py_TPFLAGS_HAVE_NEWBUFFER 0 #endif #ifndef Py_TPFLAGS_HAVE_FINALIZE #define Py_TPFLAGS_HAVE_FINALIZE 0 #endif #ifndef METH_STACKLESS #define METH_STACKLESS 0 #endif #if PY_VERSION_HEX <= 0x030700A3 || !defined(METH_FASTCALL) #ifndef METH_FASTCALL #define METH_FASTCALL 0x80 #endif typedef PyObject *(*__Pyx_PyCFunctionFast) (PyObject *self, PyObject *const *args, Py_ssize_t nargs); typedef PyObject *(*__Pyx_PyCFunctionFastWithKeywords) (PyObject *self, PyObject *const 
*args, Py_ssize_t nargs, PyObject *kwnames); #else #define __Pyx_PyCFunctionFast _PyCFunctionFast #define __Pyx_PyCFunctionFastWithKeywords _PyCFunctionFastWithKeywords #endif #if CYTHON_FAST_PYCCALL #define __Pyx_PyFastCFunction_Check(func)\ ((PyCFunction_Check(func) && (METH_FASTCALL == (PyCFunction_GET_FLAGS(func) & ~(METH_CLASS | METH_STATIC | METH_COEXIST | METH_KEYWORDS | METH_STACKLESS))))) #else #define __Pyx_PyFastCFunction_Check(func) 0 #endif #if CYTHON_COMPILING_IN_PYPY && !defined(PyObject_Malloc) #define PyObject_Malloc(s) PyMem_Malloc(s) #define PyObject_Free(p) PyMem_Free(p) #define PyObject_Realloc(p) PyMem_Realloc(p) #endif #if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX < 0x030400A1 #define PyMem_RawMalloc(n) PyMem_Malloc(n) #define PyMem_RawRealloc(p, n) PyMem_Realloc(p, n) #define PyMem_RawFree(p) PyMem_Free(p) #endif #if CYTHON_COMPILING_IN_PYSTON #define __Pyx_PyCode_HasFreeVars(co) PyCode_HasFreeVars(co) #define __Pyx_PyFrame_SetLineNumber(frame, lineno) PyFrame_SetLineNumber(frame, lineno) #else #define __Pyx_PyCode_HasFreeVars(co) (PyCode_GetNumFree(co) > 0) #define __Pyx_PyFrame_SetLineNumber(frame, lineno) (frame)->f_lineno = (lineno) #endif #if !CYTHON_FAST_THREAD_STATE || PY_VERSION_HEX < 0x02070000 #define __Pyx_PyThreadState_Current PyThreadState_GET() #elif PY_VERSION_HEX >= 0x03060000 #define __Pyx_PyThreadState_Current _PyThreadState_UncheckedGet() #elif PY_VERSION_HEX >= 0x03000000 #define __Pyx_PyThreadState_Current PyThreadState_GET() #else #define __Pyx_PyThreadState_Current _PyThreadState_Current #endif #if PY_VERSION_HEX < 0x030700A2 && !defined(PyThread_tss_create) && !defined(Py_tss_NEEDS_INIT) #include "pythread.h" #define Py_tss_NEEDS_INIT 0 typedef int Py_tss_t; static CYTHON_INLINE int PyThread_tss_create(Py_tss_t *key) { *key = PyThread_create_key(); return 0; } static CYTHON_INLINE Py_tss_t * PyThread_tss_alloc(void) { Py_tss_t *key = (Py_tss_t *)PyObject_Malloc(sizeof(Py_tss_t)); *key = Py_tss_NEEDS_INIT; return key; } static CYTHON_INLINE void PyThread_tss_free(Py_tss_t *key) { PyObject_Free(key); } static CYTHON_INLINE int PyThread_tss_is_created(Py_tss_t *key) { return *key != Py_tss_NEEDS_INIT; } static CYTHON_INLINE void PyThread_tss_delete(Py_tss_t *key) { PyThread_delete_key(*key); *key = Py_tss_NEEDS_INIT; } static CYTHON_INLINE int PyThread_tss_set(Py_tss_t *key, void *value) { return PyThread_set_key_value(*key, value); } static CYTHON_INLINE void * PyThread_tss_get(Py_tss_t *key) { return PyThread_get_key_value(*key); } #endif #if CYTHON_COMPILING_IN_CPYTHON || defined(_PyDict_NewPresized) #define __Pyx_PyDict_NewPresized(n) ((n <= 8) ? 
PyDict_New() : _PyDict_NewPresized(n)) #else #define __Pyx_PyDict_NewPresized(n) PyDict_New() #endif #if PY_MAJOR_VERSION >= 3 || CYTHON_FUTURE_DIVISION #define __Pyx_PyNumber_Divide(x,y) PyNumber_TrueDivide(x,y) #define __Pyx_PyNumber_InPlaceDivide(x,y) PyNumber_InPlaceTrueDivide(x,y) #else #define __Pyx_PyNumber_Divide(x,y) PyNumber_Divide(x,y) #define __Pyx_PyNumber_InPlaceDivide(x,y) PyNumber_InPlaceDivide(x,y) #endif #if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030500A1 && CYTHON_USE_UNICODE_INTERNALS #define __Pyx_PyDict_GetItemStr(dict, name) _PyDict_GetItem_KnownHash(dict, name, ((PyASCIIObject *) name)->hash) #else #define __Pyx_PyDict_GetItemStr(dict, name) PyDict_GetItem(dict, name) #endif #if PY_VERSION_HEX > 0x03030000 && defined(PyUnicode_KIND) #define CYTHON_PEP393_ENABLED 1 #if defined(PyUnicode_IS_READY) #define __Pyx_PyUnicode_READY(op) (likely(PyUnicode_IS_READY(op)) ?\ 0 : _PyUnicode_Ready((PyObject *)(op))) #else #define __Pyx_PyUnicode_READY(op) (0) #endif #define __Pyx_PyUnicode_GET_LENGTH(u) PyUnicode_GET_LENGTH(u) #define __Pyx_PyUnicode_READ_CHAR(u, i) PyUnicode_READ_CHAR(u, i) #define __Pyx_PyUnicode_MAX_CHAR_VALUE(u) PyUnicode_MAX_CHAR_VALUE(u) #define __Pyx_PyUnicode_KIND(u) PyUnicode_KIND(u) #define __Pyx_PyUnicode_DATA(u) PyUnicode_DATA(u) #define __Pyx_PyUnicode_READ(k, d, i) PyUnicode_READ(k, d, i) #define __Pyx_PyUnicode_WRITE(k, d, i, ch) PyUnicode_WRITE(k, d, i, ch) #if defined(PyUnicode_IS_READY) && defined(PyUnicode_GET_SIZE) #if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x03090000 #define __Pyx_PyUnicode_IS_TRUE(u) (0 != (likely(PyUnicode_IS_READY(u)) ? PyUnicode_GET_LENGTH(u) : ((PyCompactUnicodeObject *)(u))->wstr_length)) #else #define __Pyx_PyUnicode_IS_TRUE(u) (0 != (likely(PyUnicode_IS_READY(u)) ? PyUnicode_GET_LENGTH(u) : PyUnicode_GET_SIZE(u))) #endif #else #define __Pyx_PyUnicode_IS_TRUE(u) (0 != PyUnicode_GET_LENGTH(u)) #endif #else #define CYTHON_PEP393_ENABLED 0 #define PyUnicode_1BYTE_KIND 1 #define PyUnicode_2BYTE_KIND 2 #define PyUnicode_4BYTE_KIND 4 #define __Pyx_PyUnicode_READY(op) (0) #define __Pyx_PyUnicode_GET_LENGTH(u) PyUnicode_GET_SIZE(u) #define __Pyx_PyUnicode_READ_CHAR(u, i) ((Py_UCS4)(PyUnicode_AS_UNICODE(u)[i])) #define __Pyx_PyUnicode_MAX_CHAR_VALUE(u) ((sizeof(Py_UNICODE) == 2) ? 
65535 : 1114111) #define __Pyx_PyUnicode_KIND(u) (sizeof(Py_UNICODE)) #define __Pyx_PyUnicode_DATA(u) ((void*)PyUnicode_AS_UNICODE(u)) #define __Pyx_PyUnicode_READ(k, d, i) ((void)(k), (Py_UCS4)(((Py_UNICODE*)d)[i])) #define __Pyx_PyUnicode_WRITE(k, d, i, ch) (((void)(k)), ((Py_UNICODE*)d)[i] = ch) #define __Pyx_PyUnicode_IS_TRUE(u) (0 != PyUnicode_GET_SIZE(u)) #endif #if CYTHON_COMPILING_IN_PYPY #define __Pyx_PyUnicode_Concat(a, b) PyNumber_Add(a, b) #define __Pyx_PyUnicode_ConcatSafe(a, b) PyNumber_Add(a, b) #else #define __Pyx_PyUnicode_Concat(a, b) PyUnicode_Concat(a, b) #define __Pyx_PyUnicode_ConcatSafe(a, b) ((unlikely((a) == Py_None) || unlikely((b) == Py_None)) ?\ PyNumber_Add(a, b) : __Pyx_PyUnicode_Concat(a, b)) #endif #if CYTHON_COMPILING_IN_PYPY && !defined(PyUnicode_Contains) #define PyUnicode_Contains(u, s) PySequence_Contains(u, s) #endif #if CYTHON_COMPILING_IN_PYPY && !defined(PyByteArray_Check) #define PyByteArray_Check(obj) PyObject_TypeCheck(obj, &PyByteArray_Type) #endif #if CYTHON_COMPILING_IN_PYPY && !defined(PyObject_Format) #define PyObject_Format(obj, fmt) PyObject_CallMethod(obj, "__format__", "O", fmt) #endif #define __Pyx_PyString_FormatSafe(a, b) ((unlikely((a) == Py_None || (PyString_Check(b) && !PyString_CheckExact(b)))) ? PyNumber_Remainder(a, b) : __Pyx_PyString_Format(a, b)) #define __Pyx_PyUnicode_FormatSafe(a, b) ((unlikely((a) == Py_None || (PyUnicode_Check(b) && !PyUnicode_CheckExact(b)))) ? PyNumber_Remainder(a, b) : PyUnicode_Format(a, b)) #if PY_MAJOR_VERSION >= 3 #define __Pyx_PyString_Format(a, b) PyUnicode_Format(a, b) #else #define __Pyx_PyString_Format(a, b) PyString_Format(a, b) #endif #if PY_MAJOR_VERSION < 3 && !defined(PyObject_ASCII) #define PyObject_ASCII(o) PyObject_Repr(o) #endif #if PY_MAJOR_VERSION >= 3 #define PyBaseString_Type PyUnicode_Type #define PyStringObject PyUnicodeObject #define PyString_Type PyUnicode_Type #define PyString_Check PyUnicode_Check #define PyString_CheckExact PyUnicode_CheckExact #ifndef PyObject_Unicode #define PyObject_Unicode PyObject_Str #endif #endif #if PY_MAJOR_VERSION >= 3 #define __Pyx_PyBaseString_Check(obj) PyUnicode_Check(obj) #define __Pyx_PyBaseString_CheckExact(obj) PyUnicode_CheckExact(obj) #else #define __Pyx_PyBaseString_Check(obj) (PyString_Check(obj) || PyUnicode_Check(obj)) #define __Pyx_PyBaseString_CheckExact(obj) (PyString_CheckExact(obj) || PyUnicode_CheckExact(obj)) #endif #ifndef PySet_CheckExact #define PySet_CheckExact(obj) (Py_TYPE(obj) == &PySet_Type) #endif #if PY_VERSION_HEX >= 0x030900A4 #define __Pyx_SET_REFCNT(obj, refcnt) Py_SET_REFCNT(obj, refcnt) #define __Pyx_SET_SIZE(obj, size) Py_SET_SIZE(obj, size) #else #define __Pyx_SET_REFCNT(obj, refcnt) Py_REFCNT(obj) = (refcnt) #define __Pyx_SET_SIZE(obj, size) Py_SIZE(obj) = (size) #endif #if CYTHON_ASSUME_SAFE_MACROS #define __Pyx_PySequence_SIZE(seq) Py_SIZE(seq) #else #define __Pyx_PySequence_SIZE(seq) PySequence_Size(seq) #endif #if PY_MAJOR_VERSION >= 3 #define PyIntObject PyLongObject #define PyInt_Type PyLong_Type #define PyInt_Check(op) PyLong_Check(op) #define PyInt_CheckExact(op) PyLong_CheckExact(op) #define PyInt_FromString PyLong_FromString #define PyInt_FromUnicode PyLong_FromUnicode #define PyInt_FromLong PyLong_FromLong #define PyInt_FromSize_t PyLong_FromSize_t #define PyInt_FromSsize_t PyLong_FromSsize_t #define PyInt_AsLong PyLong_AsLong #define PyInt_AS_LONG PyLong_AS_LONG #define PyInt_AsSsize_t PyLong_AsSsize_t #define PyInt_AsUnsignedLongMask PyLong_AsUnsignedLongMask #define 
PyInt_AsUnsignedLongLongMask PyLong_AsUnsignedLongLongMask #define PyNumber_Int PyNumber_Long #endif #if PY_MAJOR_VERSION >= 3 #define PyBoolObject PyLongObject #endif #if PY_MAJOR_VERSION >= 3 && CYTHON_COMPILING_IN_PYPY #ifndef PyUnicode_InternFromString #define PyUnicode_InternFromString(s) PyUnicode_FromString(s) #endif #endif #if PY_VERSION_HEX < 0x030200A4 typedef long Py_hash_t; #define __Pyx_PyInt_FromHash_t PyInt_FromLong #define __Pyx_PyInt_AsHash_t __Pyx_PyIndex_AsHash_t #else #define __Pyx_PyInt_FromHash_t PyInt_FromSsize_t #define __Pyx_PyInt_AsHash_t __Pyx_PyIndex_AsSsize_t #endif #if PY_MAJOR_VERSION >= 3 #define __Pyx_PyMethod_New(func, self, klass) ((self) ? ((void)(klass), PyMethod_New(func, self)) : __Pyx_NewRef(func)) #else #define __Pyx_PyMethod_New(func, self, klass) PyMethod_New(func, self, klass) #endif #if CYTHON_USE_ASYNC_SLOTS #if PY_VERSION_HEX >= 0x030500B1 #define __Pyx_PyAsyncMethodsStruct PyAsyncMethods #define __Pyx_PyType_AsAsync(obj) (Py_TYPE(obj)->tp_as_async) #else #define __Pyx_PyType_AsAsync(obj) ((__Pyx_PyAsyncMethodsStruct*) (Py_TYPE(obj)->tp_reserved)) #endif #else #define __Pyx_PyType_AsAsync(obj) NULL #endif #ifndef __Pyx_PyAsyncMethodsStruct typedef struct { unaryfunc am_await; unaryfunc am_aiter; unaryfunc am_anext; } __Pyx_PyAsyncMethodsStruct; #endif #if defined(_WIN32) || defined(WIN32) || defined(MS_WINDOWS) #if !defined(_USE_MATH_DEFINES) #define _USE_MATH_DEFINES #endif #endif #include #ifdef NAN #define __PYX_NAN() ((float) NAN) #else static CYTHON_INLINE float __PYX_NAN() { float value; memset(&value, 0xFF, sizeof(value)); return value; } #endif #if defined(__CYGWIN__) && defined(_LDBL_EQ_DBL) #define __Pyx_truncl trunc #else #define __Pyx_truncl truncl #endif #define __PYX_MARK_ERR_POS(f_index, lineno) \ { __pyx_filename = __pyx_f[f_index]; (void)__pyx_filename; __pyx_lineno = lineno; (void)__pyx_lineno; __pyx_clineno = __LINE__; (void)__pyx_clineno; } #define __PYX_ERR(f_index, lineno, Ln_error) \ { __PYX_MARK_ERR_POS(f_index, lineno) goto Ln_error; } #ifndef __PYX_EXTERN_C #ifdef __cplusplus #define __PYX_EXTERN_C extern "C" #else #define __PYX_EXTERN_C extern #endif #endif #define __PYX_HAVE__demo__ext #define __PYX_HAVE_API__demo__ext /* Early includes */ #ifdef _OPENMP #include #endif /* _OPENMP */ #if defined(PYREX_WITHOUT_ASSERTIONS) && !defined(CYTHON_WITHOUT_ASSERTIONS) #define CYTHON_WITHOUT_ASSERTIONS #endif typedef struct {PyObject **p; const char *s; const Py_ssize_t n; const char* encoding; const char is_unicode; const char is_str; const char intern; } __Pyx_StringTabEntry; #define __PYX_DEFAULT_STRING_ENCODING_IS_ASCII 0 #define __PYX_DEFAULT_STRING_ENCODING_IS_UTF8 0 #define __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT (PY_MAJOR_VERSION >= 3 && __PYX_DEFAULT_STRING_ENCODING_IS_UTF8) #define __PYX_DEFAULT_STRING_ENCODING "" #define __Pyx_PyObject_FromString __Pyx_PyBytes_FromString #define __Pyx_PyObject_FromStringAndSize __Pyx_PyBytes_FromStringAndSize #define __Pyx_uchar_cast(c) ((unsigned char)c) #define __Pyx_long_cast(x) ((long)x) #define __Pyx_fits_Py_ssize_t(v, type, is_signed) (\ (sizeof(type) < sizeof(Py_ssize_t)) ||\ (sizeof(type) > sizeof(Py_ssize_t) &&\ likely(v < (type)PY_SSIZE_T_MAX ||\ v == (type)PY_SSIZE_T_MAX) &&\ (!is_signed || likely(v > (type)PY_SSIZE_T_MIN ||\ v == (type)PY_SSIZE_T_MIN))) ||\ (sizeof(type) == sizeof(Py_ssize_t) &&\ (is_signed || likely(v < (type)PY_SSIZE_T_MAX ||\ v == (type)PY_SSIZE_T_MAX))) ) static CYTHON_INLINE int __Pyx_is_valid_index(Py_ssize_t i, Py_ssize_t limit) { return 
(size_t) i < (size_t) limit; } #if defined (__cplusplus) && __cplusplus >= 201103L #include #define __Pyx_sst_abs(value) std::abs(value) #elif SIZEOF_INT >= SIZEOF_SIZE_T #define __Pyx_sst_abs(value) abs(value) #elif SIZEOF_LONG >= SIZEOF_SIZE_T #define __Pyx_sst_abs(value) labs(value) #elif defined (_MSC_VER) #define __Pyx_sst_abs(value) ((Py_ssize_t)_abs64(value)) #elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L #define __Pyx_sst_abs(value) llabs(value) #elif defined (__GNUC__) #define __Pyx_sst_abs(value) __builtin_llabs(value) #else #define __Pyx_sst_abs(value) ((value<0) ? -value : value) #endif static CYTHON_INLINE const char* __Pyx_PyObject_AsString(PyObject*); static CYTHON_INLINE const char* __Pyx_PyObject_AsStringAndSize(PyObject*, Py_ssize_t* length); #define __Pyx_PyByteArray_FromString(s) PyByteArray_FromStringAndSize((const char*)s, strlen((const char*)s)) #define __Pyx_PyByteArray_FromStringAndSize(s, l) PyByteArray_FromStringAndSize((const char*)s, l) #define __Pyx_PyBytes_FromString PyBytes_FromString #define __Pyx_PyBytes_FromStringAndSize PyBytes_FromStringAndSize static CYTHON_INLINE PyObject* __Pyx_PyUnicode_FromString(const char*); #if PY_MAJOR_VERSION < 3 #define __Pyx_PyStr_FromString __Pyx_PyBytes_FromString #define __Pyx_PyStr_FromStringAndSize __Pyx_PyBytes_FromStringAndSize #else #define __Pyx_PyStr_FromString __Pyx_PyUnicode_FromString #define __Pyx_PyStr_FromStringAndSize __Pyx_PyUnicode_FromStringAndSize #endif #define __Pyx_PyBytes_AsWritableString(s) ((char*) PyBytes_AS_STRING(s)) #define __Pyx_PyBytes_AsWritableSString(s) ((signed char*) PyBytes_AS_STRING(s)) #define __Pyx_PyBytes_AsWritableUString(s) ((unsigned char*) PyBytes_AS_STRING(s)) #define __Pyx_PyBytes_AsString(s) ((const char*) PyBytes_AS_STRING(s)) #define __Pyx_PyBytes_AsSString(s) ((const signed char*) PyBytes_AS_STRING(s)) #define __Pyx_PyBytes_AsUString(s) ((const unsigned char*) PyBytes_AS_STRING(s)) #define __Pyx_PyObject_AsWritableString(s) ((char*) __Pyx_PyObject_AsString(s)) #define __Pyx_PyObject_AsWritableSString(s) ((signed char*) __Pyx_PyObject_AsString(s)) #define __Pyx_PyObject_AsWritableUString(s) ((unsigned char*) __Pyx_PyObject_AsString(s)) #define __Pyx_PyObject_AsSString(s) ((const signed char*) __Pyx_PyObject_AsString(s)) #define __Pyx_PyObject_AsUString(s) ((const unsigned char*) __Pyx_PyObject_AsString(s)) #define __Pyx_PyObject_FromCString(s) __Pyx_PyObject_FromString((const char*)s) #define __Pyx_PyBytes_FromCString(s) __Pyx_PyBytes_FromString((const char*)s) #define __Pyx_PyByteArray_FromCString(s) __Pyx_PyByteArray_FromString((const char*)s) #define __Pyx_PyStr_FromCString(s) __Pyx_PyStr_FromString((const char*)s) #define __Pyx_PyUnicode_FromCString(s) __Pyx_PyUnicode_FromString((const char*)s) static CYTHON_INLINE size_t __Pyx_Py_UNICODE_strlen(const Py_UNICODE *u) { const Py_UNICODE *u_end = u; while (*u_end++) ; return (size_t)(u_end - u - 1); } #define __Pyx_PyUnicode_FromUnicode(u) PyUnicode_FromUnicode(u, __Pyx_Py_UNICODE_strlen(u)) #define __Pyx_PyUnicode_FromUnicodeAndLength PyUnicode_FromUnicode #define __Pyx_PyUnicode_AsUnicode PyUnicode_AsUnicode #define __Pyx_NewRef(obj) (Py_INCREF(obj), obj) #define __Pyx_Owned_Py_None(b) __Pyx_NewRef(Py_None) static CYTHON_INLINE PyObject * __Pyx_PyBool_FromLong(long b); static CYTHON_INLINE int __Pyx_PyObject_IsTrue(PyObject*); static CYTHON_INLINE int __Pyx_PyObject_IsTrueAndDecref(PyObject*); static CYTHON_INLINE PyObject* __Pyx_PyNumber_IntOrLong(PyObject* x); #define __Pyx_PySequence_Tuple(obj)\ 
(likely(PyTuple_CheckExact(obj)) ? __Pyx_NewRef(obj) : PySequence_Tuple(obj)) static CYTHON_INLINE Py_ssize_t __Pyx_PyIndex_AsSsize_t(PyObject*); static CYTHON_INLINE PyObject * __Pyx_PyInt_FromSize_t(size_t); static CYTHON_INLINE Py_hash_t __Pyx_PyIndex_AsHash_t(PyObject*); #if CYTHON_ASSUME_SAFE_MACROS #define __pyx_PyFloat_AsDouble(x) (PyFloat_CheckExact(x) ? PyFloat_AS_DOUBLE(x) : PyFloat_AsDouble(x)) #else #define __pyx_PyFloat_AsDouble(x) PyFloat_AsDouble(x) #endif #define __pyx_PyFloat_AsFloat(x) ((float) __pyx_PyFloat_AsDouble(x)) #if PY_MAJOR_VERSION >= 3 #define __Pyx_PyNumber_Int(x) (PyLong_CheckExact(x) ? __Pyx_NewRef(x) : PyNumber_Long(x)) #else #define __Pyx_PyNumber_Int(x) (PyInt_CheckExact(x) ? __Pyx_NewRef(x) : PyNumber_Int(x)) #endif #define __Pyx_PyNumber_Float(x) (PyFloat_CheckExact(x) ? __Pyx_NewRef(x) : PyNumber_Float(x)) #if PY_MAJOR_VERSION < 3 && __PYX_DEFAULT_STRING_ENCODING_IS_ASCII static int __Pyx_sys_getdefaultencoding_not_ascii; static int __Pyx_init_sys_getdefaultencoding_params(void) { PyObject* sys; PyObject* default_encoding = NULL; PyObject* ascii_chars_u = NULL; PyObject* ascii_chars_b = NULL; const char* default_encoding_c; sys = PyImport_ImportModule("sys"); if (!sys) goto bad; default_encoding = PyObject_CallMethod(sys, (char*) "getdefaultencoding", NULL); Py_DECREF(sys); if (!default_encoding) goto bad; default_encoding_c = PyBytes_AsString(default_encoding); if (!default_encoding_c) goto bad; if (strcmp(default_encoding_c, "ascii") == 0) { __Pyx_sys_getdefaultencoding_not_ascii = 0; } else { char ascii_chars[128]; int c; for (c = 0; c < 128; c++) { ascii_chars[c] = c; } __Pyx_sys_getdefaultencoding_not_ascii = 1; ascii_chars_u = PyUnicode_DecodeASCII(ascii_chars, 128, NULL); if (!ascii_chars_u) goto bad; ascii_chars_b = PyUnicode_AsEncodedString(ascii_chars_u, default_encoding_c, NULL); if (!ascii_chars_b || !PyBytes_Check(ascii_chars_b) || memcmp(ascii_chars, PyBytes_AS_STRING(ascii_chars_b), 128) != 0) { PyErr_Format( PyExc_ValueError, "This module compiled with c_string_encoding=ascii, but default encoding '%.200s' is not a superset of ascii.", default_encoding_c); goto bad; } Py_DECREF(ascii_chars_u); Py_DECREF(ascii_chars_b); } Py_DECREF(default_encoding); return 0; bad: Py_XDECREF(default_encoding); Py_XDECREF(ascii_chars_u); Py_XDECREF(ascii_chars_b); return -1; } #endif #if __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT && PY_MAJOR_VERSION >= 3 #define __Pyx_PyUnicode_FromStringAndSize(c_str, size) PyUnicode_DecodeUTF8(c_str, size, NULL) #else #define __Pyx_PyUnicode_FromStringAndSize(c_str, size) PyUnicode_Decode(c_str, size, __PYX_DEFAULT_STRING_ENCODING, NULL) #if __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT static char* __PYX_DEFAULT_STRING_ENCODING; static int __Pyx_init_sys_getdefaultencoding_params(void) { PyObject* sys; PyObject* default_encoding = NULL; char* default_encoding_c; sys = PyImport_ImportModule("sys"); if (!sys) goto bad; default_encoding = PyObject_CallMethod(sys, (char*) (const char*) "getdefaultencoding", NULL); Py_DECREF(sys); if (!default_encoding) goto bad; default_encoding_c = PyBytes_AsString(default_encoding); if (!default_encoding_c) goto bad; __PYX_DEFAULT_STRING_ENCODING = (char*) malloc(strlen(default_encoding_c) + 1); if (!__PYX_DEFAULT_STRING_ENCODING) goto bad; strcpy(__PYX_DEFAULT_STRING_ENCODING, default_encoding_c); Py_DECREF(default_encoding); return 0; bad: Py_XDECREF(default_encoding); return -1; } #endif #endif /* Test for GCC > 2.95 */ #if defined(__GNUC__) && (__GNUC__ > 2 || (__GNUC__ == 2 && 
(__GNUC_MINOR__ > 95))) #define likely(x) __builtin_expect(!!(x), 1) #define unlikely(x) __builtin_expect(!!(x), 0) #else /* !__GNUC__ or GCC < 2.95 */ #define likely(x) (x) #define unlikely(x) (x) #endif /* __GNUC__ */ static CYTHON_INLINE void __Pyx_pretend_to_initialize(void* ptr) { (void)ptr; } static PyObject *__pyx_m = NULL; static PyObject *__pyx_d; static PyObject *__pyx_b; static PyObject *__pyx_cython_runtime = NULL; static PyObject *__pyx_empty_tuple; static PyObject *__pyx_empty_bytes; static PyObject *__pyx_empty_unicode; static int __pyx_lineno; static int __pyx_clineno = 0; static const char * __pyx_cfilenm= __FILE__; static const char *__pyx_filename; static const char *__pyx_f[] = { "ext.pyx", }; /*--- Type declarations ---*/ /* --- Runtime support code (head) --- */ /* Refnanny.proto */ #ifndef CYTHON_REFNANNY #define CYTHON_REFNANNY 0 #endif #if CYTHON_REFNANNY typedef struct { void (*INCREF)(void*, PyObject*, int); void (*DECREF)(void*, PyObject*, int); void (*GOTREF)(void*, PyObject*, int); void (*GIVEREF)(void*, PyObject*, int); void* (*SetupContext)(const char*, int, const char*); void (*FinishContext)(void**); } __Pyx_RefNannyAPIStruct; static __Pyx_RefNannyAPIStruct *__Pyx_RefNanny = NULL; static __Pyx_RefNannyAPIStruct *__Pyx_RefNannyImportAPI(const char *modname); #define __Pyx_RefNannyDeclarations void *__pyx_refnanny = NULL; #ifdef WITH_THREAD #define __Pyx_RefNannySetupContext(name, acquire_gil)\ if (acquire_gil) {\ PyGILState_STATE __pyx_gilstate_save = PyGILState_Ensure();\ __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), __LINE__, __FILE__);\ PyGILState_Release(__pyx_gilstate_save);\ } else {\ __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), __LINE__, __FILE__);\ } #else #define __Pyx_RefNannySetupContext(name, acquire_gil)\ __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), __LINE__, __FILE__) #endif #define __Pyx_RefNannyFinishContext()\ __Pyx_RefNanny->FinishContext(&__pyx_refnanny) #define __Pyx_INCREF(r) __Pyx_RefNanny->INCREF(__pyx_refnanny, (PyObject *)(r), __LINE__) #define __Pyx_DECREF(r) __Pyx_RefNanny->DECREF(__pyx_refnanny, (PyObject *)(r), __LINE__) #define __Pyx_GOTREF(r) __Pyx_RefNanny->GOTREF(__pyx_refnanny, (PyObject *)(r), __LINE__) #define __Pyx_GIVEREF(r) __Pyx_RefNanny->GIVEREF(__pyx_refnanny, (PyObject *)(r), __LINE__) #define __Pyx_XINCREF(r) do { if((r) != NULL) {__Pyx_INCREF(r); }} while(0) #define __Pyx_XDECREF(r) do { if((r) != NULL) {__Pyx_DECREF(r); }} while(0) #define __Pyx_XGOTREF(r) do { if((r) != NULL) {__Pyx_GOTREF(r); }} while(0) #define __Pyx_XGIVEREF(r) do { if((r) != NULL) {__Pyx_GIVEREF(r);}} while(0) #else #define __Pyx_RefNannyDeclarations #define __Pyx_RefNannySetupContext(name, acquire_gil) #define __Pyx_RefNannyFinishContext() #define __Pyx_INCREF(r) Py_INCREF(r) #define __Pyx_DECREF(r) Py_DECREF(r) #define __Pyx_GOTREF(r) #define __Pyx_GIVEREF(r) #define __Pyx_XINCREF(r) Py_XINCREF(r) #define __Pyx_XDECREF(r) Py_XDECREF(r) #define __Pyx_XGOTREF(r) #define __Pyx_XGIVEREF(r) #endif #define __Pyx_XDECREF_SET(r, v) do {\ PyObject *tmp = (PyObject *) r;\ r = v; __Pyx_XDECREF(tmp);\ } while (0) #define __Pyx_DECREF_SET(r, v) do {\ PyObject *tmp = (PyObject *) r;\ r = v; __Pyx_DECREF(tmp);\ } while (0) #define __Pyx_CLEAR(r) do { PyObject* tmp = ((PyObject*)(r)); r = NULL; __Pyx_DECREF(tmp);} while(0) #define __Pyx_XCLEAR(r) do { if((r) != NULL) {PyObject* tmp = ((PyObject*)(r)); r = NULL; __Pyx_DECREF(tmp);}} while(0) /* PyObjectGetAttrStr.proto */ #if CYTHON_USE_TYPE_SLOTS static CYTHON_INLINE PyObject* 
__Pyx_PyObject_GetAttrStr(PyObject* obj, PyObject* attr_name); #else #define __Pyx_PyObject_GetAttrStr(o,n) PyObject_GetAttr(o,n) #endif /* GetBuiltinName.proto */ static PyObject *__Pyx_GetBuiltinName(PyObject *name); /* PyObjectCall.proto */ #if CYTHON_COMPILING_IN_CPYTHON static CYTHON_INLINE PyObject* __Pyx_PyObject_Call(PyObject *func, PyObject *arg, PyObject *kw); #else #define __Pyx_PyObject_Call(func, arg, kw) PyObject_Call(func, arg, kw) #endif /* PyDictVersioning.proto */ #if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_TYPE_SLOTS #define __PYX_DICT_VERSION_INIT ((PY_UINT64_T) -1) #define __PYX_GET_DICT_VERSION(dict) (((PyDictObject*)(dict))->ma_version_tag) #define __PYX_UPDATE_DICT_CACHE(dict, value, cache_var, version_var)\ (version_var) = __PYX_GET_DICT_VERSION(dict);\ (cache_var) = (value); #define __PYX_PY_DICT_LOOKUP_IF_MODIFIED(VAR, DICT, LOOKUP) {\ static PY_UINT64_T __pyx_dict_version = 0;\ static PyObject *__pyx_dict_cached_value = NULL;\ if (likely(__PYX_GET_DICT_VERSION(DICT) == __pyx_dict_version)) {\ (VAR) = __pyx_dict_cached_value;\ } else {\ (VAR) = __pyx_dict_cached_value = (LOOKUP);\ __pyx_dict_version = __PYX_GET_DICT_VERSION(DICT);\ }\ } static CYTHON_INLINE PY_UINT64_T __Pyx_get_tp_dict_version(PyObject *obj); static CYTHON_INLINE PY_UINT64_T __Pyx_get_object_dict_version(PyObject *obj); static CYTHON_INLINE int __Pyx_object_dict_version_matches(PyObject* obj, PY_UINT64_T tp_dict_version, PY_UINT64_T obj_dict_version); #else #define __PYX_GET_DICT_VERSION(dict) (0) #define __PYX_UPDATE_DICT_CACHE(dict, value, cache_var, version_var) #define __PYX_PY_DICT_LOOKUP_IF_MODIFIED(VAR, DICT, LOOKUP) (VAR) = (LOOKUP); #endif /* PyThreadStateGet.proto */ #if CYTHON_FAST_THREAD_STATE #define __Pyx_PyThreadState_declare PyThreadState *__pyx_tstate; #define __Pyx_PyThreadState_assign __pyx_tstate = __Pyx_PyThreadState_Current; #define __Pyx_PyErr_Occurred() __pyx_tstate->curexc_type #else #define __Pyx_PyThreadState_declare #define __Pyx_PyThreadState_assign #define __Pyx_PyErr_Occurred() PyErr_Occurred() #endif /* PyErrFetchRestore.proto */ #if CYTHON_FAST_THREAD_STATE #define __Pyx_PyErr_Clear() __Pyx_ErrRestore(NULL, NULL, NULL) #define __Pyx_ErrRestoreWithState(type, value, tb) __Pyx_ErrRestoreInState(PyThreadState_GET(), type, value, tb) #define __Pyx_ErrFetchWithState(type, value, tb) __Pyx_ErrFetchInState(PyThreadState_GET(), type, value, tb) #define __Pyx_ErrRestore(type, value, tb) __Pyx_ErrRestoreInState(__pyx_tstate, type, value, tb) #define __Pyx_ErrFetch(type, value, tb) __Pyx_ErrFetchInState(__pyx_tstate, type, value, tb) static CYTHON_INLINE void __Pyx_ErrRestoreInState(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb); static CYTHON_INLINE void __Pyx_ErrFetchInState(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb); #if CYTHON_COMPILING_IN_CPYTHON #define __Pyx_PyErr_SetNone(exc) (Py_INCREF(exc), __Pyx_ErrRestore((exc), NULL, NULL)) #else #define __Pyx_PyErr_SetNone(exc) PyErr_SetNone(exc) #endif #else #define __Pyx_PyErr_Clear() PyErr_Clear() #define __Pyx_PyErr_SetNone(exc) PyErr_SetNone(exc) #define __Pyx_ErrRestoreWithState(type, value, tb) PyErr_Restore(type, value, tb) #define __Pyx_ErrFetchWithState(type, value, tb) PyErr_Fetch(type, value, tb) #define __Pyx_ErrRestoreInState(tstate, type, value, tb) PyErr_Restore(type, value, tb) #define __Pyx_ErrFetchInState(tstate, type, value, tb) PyErr_Fetch(type, value, tb) #define __Pyx_ErrRestore(type, value, tb) PyErr_Restore(type, value, tb) #define 
__Pyx_ErrFetch(type, value, tb) PyErr_Fetch(type, value, tb) #endif /* CLineInTraceback.proto */ #ifdef CYTHON_CLINE_IN_TRACEBACK #define __Pyx_CLineForTraceback(tstate, c_line) (((CYTHON_CLINE_IN_TRACEBACK)) ? c_line : 0) #else static int __Pyx_CLineForTraceback(PyThreadState *tstate, int c_line); #endif /* CodeObjectCache.proto */ typedef struct { PyCodeObject* code_object; int code_line; } __Pyx_CodeObjectCacheEntry; struct __Pyx_CodeObjectCache { int count; int max_count; __Pyx_CodeObjectCacheEntry* entries; }; static struct __Pyx_CodeObjectCache __pyx_code_cache = {0,0,NULL}; static int __pyx_bisect_code_objects(__Pyx_CodeObjectCacheEntry* entries, int count, int code_line); static PyCodeObject *__pyx_find_code_object(int code_line); static void __pyx_insert_code_object(int code_line, PyCodeObject* code_object); /* AddTraceback.proto */ static void __Pyx_AddTraceback(const char *funcname, int c_line, int py_line, const char *filename); /* GCCDiagnostics.proto */ #if defined(__GNUC__) && (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 6)) #define __Pyx_HAS_GCC_DIAGNOSTIC #endif /* CIntToPy.proto */ static CYTHON_INLINE PyObject* __Pyx_PyInt_From_long(long value); /* CIntFromPy.proto */ static CYTHON_INLINE long __Pyx_PyInt_As_long(PyObject *); /* CIntFromPy.proto */ static CYTHON_INLINE int __Pyx_PyInt_As_int(PyObject *); /* FastTypeChecks.proto */ #if CYTHON_COMPILING_IN_CPYTHON #define __Pyx_TypeCheck(obj, type) __Pyx_IsSubtype(Py_TYPE(obj), (PyTypeObject *)type) static CYTHON_INLINE int __Pyx_IsSubtype(PyTypeObject *a, PyTypeObject *b); static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches(PyObject *err, PyObject *type); static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches2(PyObject *err, PyObject *type1, PyObject *type2); #else #define __Pyx_TypeCheck(obj, type) PyObject_TypeCheck(obj, (PyTypeObject *)type) #define __Pyx_PyErr_GivenExceptionMatches(err, type) PyErr_GivenExceptionMatches(err, type) #define __Pyx_PyErr_GivenExceptionMatches2(err, type1, type2) (PyErr_GivenExceptionMatches(err, type1) || PyErr_GivenExceptionMatches(err, type2)) #endif #define __Pyx_PyException_Check(obj) __Pyx_TypeCheck(obj, PyExc_Exception) /* CheckBinaryVersion.proto */ static int __Pyx_check_binary_version(void); /* InitStrings.proto */ static int __Pyx_InitStrings(__Pyx_StringTabEntry *t); /* Module declarations from 'demo.ext' */ #define __Pyx_MODULE_NAME "demo.ext" extern int __pyx_module_is_main_demo__ext; int __pyx_module_is_main_demo__ext = 0; /* Implementation of 'demo.ext' */ static PyObject *__pyx_builtin_print; static const char __pyx_k_run[] = "run"; static const char __pyx_k_main[] = "__main__"; static const char __pyx_k_name[] = "__name__"; static const char __pyx_k_test[] = "__test__"; static const char __pyx_k_print[] = "print"; static const char __pyx_k_ext_pyx[] = "ext.pyx"; static const char __pyx_k_demo_ext[] = "demo.ext"; static const char __pyx_k_Extension_code[] = "Extension code"; static const char __pyx_k_cline_in_traceback[] = "cline_in_traceback"; static PyObject *__pyx_kp_u_Extension_code; static PyObject *__pyx_n_s_cline_in_traceback; static PyObject *__pyx_n_s_demo_ext; static PyObject *__pyx_kp_s_ext_pyx; static PyObject *__pyx_n_s_main; static PyObject *__pyx_n_s_name; static PyObject *__pyx_n_s_print; static PyObject *__pyx_n_s_run; static PyObject *__pyx_n_s_test; static PyObject *__pyx_pf_4demo_3ext_run(CYTHON_UNUSED PyObject *__pyx_self); /* proto */ static PyObject *__pyx_tuple_; static PyObject *__pyx_codeobj__2; /* Late includes */ /* 
"demo/ext.pyx":3 * from __future__ import print_function * * def run(): # <<<<<<<<<<<<<< * print("Extension code") */ /* Python wrapper */ static PyObject *__pyx_pw_4demo_3ext_1run(PyObject *__pyx_self, CYTHON_UNUSED PyObject *unused); /*proto*/ static PyMethodDef __pyx_mdef_4demo_3ext_1run = {"run", (PyCFunction)__pyx_pw_4demo_3ext_1run, METH_NOARGS, 0}; static PyObject *__pyx_pw_4demo_3ext_1run(PyObject *__pyx_self, CYTHON_UNUSED PyObject *unused) { PyObject *__pyx_r = 0; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("run (wrapper)", 0); __pyx_r = __pyx_pf_4demo_3ext_run(__pyx_self); /* function exit code */ __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject *__pyx_pf_4demo_3ext_run(CYTHON_UNUSED PyObject *__pyx_self) { PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations PyObject *__pyx_t_1 = NULL; int __pyx_lineno = 0; const char *__pyx_filename = NULL; int __pyx_clineno = 0; __Pyx_RefNannySetupContext("run", 0); /* "demo/ext.pyx":4 * * def run(): * print("Extension code") # <<<<<<<<<<<<<< */ __pyx_t_1 = __Pyx_PyObject_Call(__pyx_builtin_print, __pyx_tuple_, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 4, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; /* "demo/ext.pyx":3 * from __future__ import print_function * * def run(): # <<<<<<<<<<<<<< * print("Extension code") */ /* function exit code */ __pyx_r = Py_None; __Pyx_INCREF(Py_None); goto __pyx_L0; __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_1); __Pyx_AddTraceback("demo.ext.run", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyMethodDef __pyx_methods[] = { {0, 0, 0, 0} }; #if PY_MAJOR_VERSION >= 3 #if CYTHON_PEP489_MULTI_PHASE_INIT static PyObject* __pyx_pymod_create(PyObject *spec, PyModuleDef *def); /*proto*/ static int __pyx_pymod_exec_ext(PyObject* module); /*proto*/ static PyModuleDef_Slot __pyx_moduledef_slots[] = { {Py_mod_create, (void*)__pyx_pymod_create}, {Py_mod_exec, (void*)__pyx_pymod_exec_ext}, {0, NULL} }; #endif static struct PyModuleDef __pyx_moduledef = { PyModuleDef_HEAD_INIT, "ext", 0, /* m_doc */ #if CYTHON_PEP489_MULTI_PHASE_INIT 0, /* m_size */ #else -1, /* m_size */ #endif __pyx_methods /* m_methods */, #if CYTHON_PEP489_MULTI_PHASE_INIT __pyx_moduledef_slots, /* m_slots */ #else NULL, /* m_reload */ #endif NULL, /* m_traverse */ NULL, /* m_clear */ NULL /* m_free */ }; #endif #ifndef CYTHON_SMALL_CODE #if defined(__clang__) #define CYTHON_SMALL_CODE #elif defined(__GNUC__) && (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 3)) #define CYTHON_SMALL_CODE __attribute__((cold)) #else #define CYTHON_SMALL_CODE #endif #endif static __Pyx_StringTabEntry __pyx_string_tab[] = { {&__pyx_kp_u_Extension_code, __pyx_k_Extension_code, sizeof(__pyx_k_Extension_code), 0, 1, 0, 0}, {&__pyx_n_s_cline_in_traceback, __pyx_k_cline_in_traceback, sizeof(__pyx_k_cline_in_traceback), 0, 0, 1, 1}, {&__pyx_n_s_demo_ext, __pyx_k_demo_ext, sizeof(__pyx_k_demo_ext), 0, 0, 1, 1}, {&__pyx_kp_s_ext_pyx, __pyx_k_ext_pyx, sizeof(__pyx_k_ext_pyx), 0, 0, 1, 0}, {&__pyx_n_s_main, __pyx_k_main, sizeof(__pyx_k_main), 0, 0, 1, 1}, {&__pyx_n_s_name, __pyx_k_name, sizeof(__pyx_k_name), 0, 0, 1, 1}, {&__pyx_n_s_print, __pyx_k_print, sizeof(__pyx_k_print), 0, 0, 1, 1}, {&__pyx_n_s_run, __pyx_k_run, sizeof(__pyx_k_run), 0, 0, 1, 1}, {&__pyx_n_s_test, __pyx_k_test, sizeof(__pyx_k_test), 0, 0, 1, 1}, {0, 0, 0, 0, 0, 0, 0} }; static CYTHON_SMALL_CODE int __Pyx_InitCachedBuiltins(void) { 
__pyx_builtin_print = __Pyx_GetBuiltinName(__pyx_n_s_print); if (!__pyx_builtin_print) __PYX_ERR(0, 4, __pyx_L1_error) return 0; __pyx_L1_error:; return -1; } static CYTHON_SMALL_CODE int __Pyx_InitCachedConstants(void) { __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__Pyx_InitCachedConstants", 0); /* "demo/ext.pyx":4 * * def run(): * print("Extension code") # <<<<<<<<<<<<<< */ __pyx_tuple_ = PyTuple_Pack(1, __pyx_kp_u_Extension_code); if (unlikely(!__pyx_tuple_)) __PYX_ERR(0, 4, __pyx_L1_error) __Pyx_GOTREF(__pyx_tuple_); __Pyx_GIVEREF(__pyx_tuple_); /* "demo/ext.pyx":3 * from __future__ import print_function * * def run(): # <<<<<<<<<<<<<< * print("Extension code") */ __pyx_codeobj__2 = (PyObject*)__Pyx_PyCode_New(0, 0, 0, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_ext_pyx, __pyx_n_s_run, 3, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__2)) __PYX_ERR(0, 3, __pyx_L1_error) __Pyx_RefNannyFinishContext(); return 0; __pyx_L1_error:; __Pyx_RefNannyFinishContext(); return -1; } static CYTHON_SMALL_CODE int __Pyx_InitGlobals(void) { if (__Pyx_InitStrings(__pyx_string_tab) < 0) __PYX_ERR(0, 1, __pyx_L1_error); return 0; __pyx_L1_error:; return -1; } static CYTHON_SMALL_CODE int __Pyx_modinit_global_init_code(void); /*proto*/ static CYTHON_SMALL_CODE int __Pyx_modinit_variable_export_code(void); /*proto*/ static CYTHON_SMALL_CODE int __Pyx_modinit_function_export_code(void); /*proto*/ static CYTHON_SMALL_CODE int __Pyx_modinit_type_init_code(void); /*proto*/ static CYTHON_SMALL_CODE int __Pyx_modinit_type_import_code(void); /*proto*/ static CYTHON_SMALL_CODE int __Pyx_modinit_variable_import_code(void); /*proto*/ static CYTHON_SMALL_CODE int __Pyx_modinit_function_import_code(void); /*proto*/ static int __Pyx_modinit_global_init_code(void) { __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__Pyx_modinit_global_init_code", 0); /*--- Global init code ---*/ __Pyx_RefNannyFinishContext(); return 0; } static int __Pyx_modinit_variable_export_code(void) { __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__Pyx_modinit_variable_export_code", 0); /*--- Variable export code ---*/ __Pyx_RefNannyFinishContext(); return 0; } static int __Pyx_modinit_function_export_code(void) { __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__Pyx_modinit_function_export_code", 0); /*--- Function export code ---*/ __Pyx_RefNannyFinishContext(); return 0; } static int __Pyx_modinit_type_init_code(void) { __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__Pyx_modinit_type_init_code", 0); /*--- Type init code ---*/ __Pyx_RefNannyFinishContext(); return 0; } static int __Pyx_modinit_type_import_code(void) { __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__Pyx_modinit_type_import_code", 0); /*--- Type import code ---*/ __Pyx_RefNannyFinishContext(); return 0; } static int __Pyx_modinit_variable_import_code(void) { __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__Pyx_modinit_variable_import_code", 0); /*--- Variable import code ---*/ __Pyx_RefNannyFinishContext(); return 0; } static int __Pyx_modinit_function_import_code(void) { __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__Pyx_modinit_function_import_code", 0); /*--- Function import code ---*/ __Pyx_RefNannyFinishContext(); return 0; } #ifndef CYTHON_NO_PYINIT_EXPORT #define __Pyx_PyMODINIT_FUNC PyMODINIT_FUNC #elif PY_MAJOR_VERSION < 3 #ifdef __cplusplus #define __Pyx_PyMODINIT_FUNC extern "C" void #else 
#define __Pyx_PyMODINIT_FUNC void #endif #else #ifdef __cplusplus #define __Pyx_PyMODINIT_FUNC extern "C" PyObject * #else #define __Pyx_PyMODINIT_FUNC PyObject * #endif #endif #if PY_MAJOR_VERSION < 3 __Pyx_PyMODINIT_FUNC initext(void) CYTHON_SMALL_CODE; /*proto*/ __Pyx_PyMODINIT_FUNC initext(void) #else __Pyx_PyMODINIT_FUNC PyInit_ext(void) CYTHON_SMALL_CODE; /*proto*/ __Pyx_PyMODINIT_FUNC PyInit_ext(void) #if CYTHON_PEP489_MULTI_PHASE_INIT { return PyModuleDef_Init(&__pyx_moduledef); } static CYTHON_SMALL_CODE int __Pyx_check_single_interpreter(void) { #if PY_VERSION_HEX >= 0x030700A1 static PY_INT64_T main_interpreter_id = -1; PY_INT64_T current_id = PyInterpreterState_GetID(PyThreadState_Get()->interp); if (main_interpreter_id == -1) { main_interpreter_id = current_id; return (unlikely(current_id == -1)) ? -1 : 0; } else if (unlikely(main_interpreter_id != current_id)) #else static PyInterpreterState *main_interpreter = NULL; PyInterpreterState *current_interpreter = PyThreadState_Get()->interp; if (!main_interpreter) { main_interpreter = current_interpreter; } else if (unlikely(main_interpreter != current_interpreter)) #endif { PyErr_SetString( PyExc_ImportError, "Interpreter change detected - this module can only be loaded into one interpreter per process."); return -1; } return 0; } static CYTHON_SMALL_CODE int __Pyx_copy_spec_to_module(PyObject *spec, PyObject *moddict, const char* from_name, const char* to_name, int allow_none) { PyObject *value = PyObject_GetAttrString(spec, from_name); int result = 0; if (likely(value)) { if (allow_none || value != Py_None) { result = PyDict_SetItemString(moddict, to_name, value); } Py_DECREF(value); } else if (PyErr_ExceptionMatches(PyExc_AttributeError)) { PyErr_Clear(); } else { result = -1; } return result; } static CYTHON_SMALL_CODE PyObject* __pyx_pymod_create(PyObject *spec, CYTHON_UNUSED PyModuleDef *def) { PyObject *module = NULL, *moddict, *modname; if (__Pyx_check_single_interpreter()) return NULL; if (__pyx_m) return __Pyx_NewRef(__pyx_m); modname = PyObject_GetAttrString(spec, "name"); if (unlikely(!modname)) goto bad; module = PyModule_NewObject(modname); Py_DECREF(modname); if (unlikely(!module)) goto bad; moddict = PyModule_GetDict(module); if (unlikely(!moddict)) goto bad; if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "loader", "__loader__", 1) < 0)) goto bad; if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "origin", "__file__", 1) < 0)) goto bad; if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "parent", "__package__", 1) < 0)) goto bad; if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "submodule_search_locations", "__path__", 0) < 0)) goto bad; return module; bad: Py_XDECREF(module); return NULL; } static CYTHON_SMALL_CODE int __pyx_pymod_exec_ext(PyObject *__pyx_pyinit_module) #endif #endif { PyObject *__pyx_t_1 = NULL; int __pyx_lineno = 0; const char *__pyx_filename = NULL; int __pyx_clineno = 0; __Pyx_RefNannyDeclarations #if CYTHON_PEP489_MULTI_PHASE_INIT if (__pyx_m) { if (__pyx_m == __pyx_pyinit_module) return 0; PyErr_SetString(PyExc_RuntimeError, "Module 'ext' has already been imported. 
Re-initialisation is not supported."); return -1; } #elif PY_MAJOR_VERSION >= 3 if (__pyx_m) return __Pyx_NewRef(__pyx_m); #endif #if CYTHON_REFNANNY __Pyx_RefNanny = __Pyx_RefNannyImportAPI("refnanny"); if (!__Pyx_RefNanny) { PyErr_Clear(); __Pyx_RefNanny = __Pyx_RefNannyImportAPI("Cython.Runtime.refnanny"); if (!__Pyx_RefNanny) Py_FatalError("failed to import 'refnanny' module"); } #endif __Pyx_RefNannySetupContext("__Pyx_PyMODINIT_FUNC PyInit_ext(void)", 0); if (__Pyx_check_binary_version() < 0) __PYX_ERR(0, 1, __pyx_L1_error) #ifdef __Pxy_PyFrame_Initialize_Offsets __Pxy_PyFrame_Initialize_Offsets(); #endif __pyx_empty_tuple = PyTuple_New(0); if (unlikely(!__pyx_empty_tuple)) __PYX_ERR(0, 1, __pyx_L1_error) __pyx_empty_bytes = PyBytes_FromStringAndSize("", 0); if (unlikely(!__pyx_empty_bytes)) __PYX_ERR(0, 1, __pyx_L1_error) __pyx_empty_unicode = PyUnicode_FromStringAndSize("", 0); if (unlikely(!__pyx_empty_unicode)) __PYX_ERR(0, 1, __pyx_L1_error) #ifdef __Pyx_CyFunction_USED if (__pyx_CyFunction_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) #endif #ifdef __Pyx_FusedFunction_USED if (__pyx_FusedFunction_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) #endif #ifdef __Pyx_Coroutine_USED if (__pyx_Coroutine_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) #endif #ifdef __Pyx_Generator_USED if (__pyx_Generator_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) #endif #ifdef __Pyx_AsyncGen_USED if (__pyx_AsyncGen_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) #endif #ifdef __Pyx_StopAsyncIteration_USED if (__pyx_StopAsyncIteration_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) #endif /*--- Library function declarations ---*/ /*--- Threads initialization code ---*/ #if defined(WITH_THREAD) && PY_VERSION_HEX < 0x030700F0 && defined(__PYX_FORCE_INIT_THREADS) && __PYX_FORCE_INIT_THREADS PyEval_InitThreads(); #endif /*--- Module creation code ---*/ #if CYTHON_PEP489_MULTI_PHASE_INIT __pyx_m = __pyx_pyinit_module; Py_INCREF(__pyx_m); #else #if PY_MAJOR_VERSION < 3 __pyx_m = Py_InitModule4("ext", __pyx_methods, 0, 0, PYTHON_API_VERSION); Py_XINCREF(__pyx_m); #else __pyx_m = PyModule_Create(&__pyx_moduledef); #endif if (unlikely(!__pyx_m)) __PYX_ERR(0, 1, __pyx_L1_error) #endif __pyx_d = PyModule_GetDict(__pyx_m); if (unlikely(!__pyx_d)) __PYX_ERR(0, 1, __pyx_L1_error) Py_INCREF(__pyx_d); __pyx_b = PyImport_AddModule(__Pyx_BUILTIN_MODULE_NAME); if (unlikely(!__pyx_b)) __PYX_ERR(0, 1, __pyx_L1_error) Py_INCREF(__pyx_b); __pyx_cython_runtime = PyImport_AddModule((char *) "cython_runtime"); if (unlikely(!__pyx_cython_runtime)) __PYX_ERR(0, 1, __pyx_L1_error) Py_INCREF(__pyx_cython_runtime); if (PyObject_SetAttrString(__pyx_m, "__builtins__", __pyx_b) < 0) __PYX_ERR(0, 1, __pyx_L1_error); /*--- Initialize various global constants etc. 
---*/ if (__Pyx_InitGlobals() < 0) __PYX_ERR(0, 1, __pyx_L1_error) #if PY_MAJOR_VERSION < 3 && (__PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT) if (__Pyx_init_sys_getdefaultencoding_params() < 0) __PYX_ERR(0, 1, __pyx_L1_error) #endif if (__pyx_module_is_main_demo__ext) { if (PyObject_SetAttr(__pyx_m, __pyx_n_s_name, __pyx_n_s_main) < 0) __PYX_ERR(0, 1, __pyx_L1_error) } #if PY_MAJOR_VERSION >= 3 { PyObject *modules = PyImport_GetModuleDict(); if (unlikely(!modules)) __PYX_ERR(0, 1, __pyx_L1_error) if (!PyDict_GetItemString(modules, "demo.ext")) { if (unlikely(PyDict_SetItemString(modules, "demo.ext", __pyx_m) < 0)) __PYX_ERR(0, 1, __pyx_L1_error) } } #endif /*--- Builtin init code ---*/ if (__Pyx_InitCachedBuiltins() < 0) __PYX_ERR(0, 1, __pyx_L1_error) /*--- Constants init code ---*/ if (__Pyx_InitCachedConstants() < 0) __PYX_ERR(0, 1, __pyx_L1_error) /*--- Global type/function init code ---*/ (void)__Pyx_modinit_global_init_code(); (void)__Pyx_modinit_variable_export_code(); (void)__Pyx_modinit_function_export_code(); (void)__Pyx_modinit_type_init_code(); (void)__Pyx_modinit_type_import_code(); (void)__Pyx_modinit_variable_import_code(); (void)__Pyx_modinit_function_import_code(); /*--- Execution code ---*/ #if defined(__Pyx_Generator_USED) || defined(__Pyx_Coroutine_USED) if (__Pyx_patch_abc() < 0) __PYX_ERR(0, 1, __pyx_L1_error) #endif /* "demo/ext.pyx":3 * from __future__ import print_function * * def run(): # <<<<<<<<<<<<<< * print("Extension code") */ __pyx_t_1 = PyCFunction_NewEx(&__pyx_mdef_4demo_3ext_1run, NULL, __pyx_n_s_demo_ext); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 3, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); if (PyDict_SetItem(__pyx_d, __pyx_n_s_run, __pyx_t_1) < 0) __PYX_ERR(0, 3, __pyx_L1_error) __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; /* "demo/ext.pyx":1 * from __future__ import print_function # <<<<<<<<<<<<<< * * def run(): */ __pyx_t_1 = __Pyx_PyDict_NewPresized(0); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); if (PyDict_SetItem(__pyx_d, __pyx_n_s_test, __pyx_t_1) < 0) __PYX_ERR(0, 1, __pyx_L1_error) __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; /*--- Wrapped vars code ---*/ goto __pyx_L0; __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_1); if (__pyx_m) { if (__pyx_d) { __Pyx_AddTraceback("init demo.ext", __pyx_clineno, __pyx_lineno, __pyx_filename); } Py_CLEAR(__pyx_m); } else if (!PyErr_Occurred()) { PyErr_SetString(PyExc_ImportError, "init demo.ext"); } __pyx_L0:; __Pyx_RefNannyFinishContext(); #if CYTHON_PEP489_MULTI_PHASE_INIT return (__pyx_m != NULL) ? 
0 : -1; #elif PY_MAJOR_VERSION >= 3 return __pyx_m; #else return; #endif } /* --- Runtime support code --- */ /* Refnanny */ #if CYTHON_REFNANNY static __Pyx_RefNannyAPIStruct *__Pyx_RefNannyImportAPI(const char *modname) { PyObject *m = NULL, *p = NULL; void *r = NULL; m = PyImport_ImportModule(modname); if (!m) goto end; p = PyObject_GetAttrString(m, "RefNannyAPI"); if (!p) goto end; r = PyLong_AsVoidPtr(p); end: Py_XDECREF(p); Py_XDECREF(m); return (__Pyx_RefNannyAPIStruct *)r; } #endif /* PyObjectGetAttrStr */ #if CYTHON_USE_TYPE_SLOTS static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStr(PyObject* obj, PyObject* attr_name) { PyTypeObject* tp = Py_TYPE(obj); if (likely(tp->tp_getattro)) return tp->tp_getattro(obj, attr_name); #if PY_MAJOR_VERSION < 3 if (likely(tp->tp_getattr)) return tp->tp_getattr(obj, PyString_AS_STRING(attr_name)); #endif return PyObject_GetAttr(obj, attr_name); } #endif /* GetBuiltinName */ static PyObject *__Pyx_GetBuiltinName(PyObject *name) { PyObject* result = __Pyx_PyObject_GetAttrStr(__pyx_b, name); if (unlikely(!result)) { PyErr_Format(PyExc_NameError, #if PY_MAJOR_VERSION >= 3 "name '%U' is not defined", name); #else "name '%.200s' is not defined", PyString_AS_STRING(name)); #endif } return result; } /* PyObjectCall */ #if CYTHON_COMPILING_IN_CPYTHON static CYTHON_INLINE PyObject* __Pyx_PyObject_Call(PyObject *func, PyObject *arg, PyObject *kw) { PyObject *result; ternaryfunc call = Py_TYPE(func)->tp_call; if (unlikely(!call)) return PyObject_Call(func, arg, kw); if (unlikely(Py_EnterRecursiveCall((char*)" while calling a Python object"))) return NULL; result = (*call)(func, arg, kw); Py_LeaveRecursiveCall(); if (unlikely(!result) && unlikely(!PyErr_Occurred())) { PyErr_SetString( PyExc_SystemError, "NULL result without error in PyObject_Call"); } return result; } #endif /* PyDictVersioning */ #if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_TYPE_SLOTS static CYTHON_INLINE PY_UINT64_T __Pyx_get_tp_dict_version(PyObject *obj) { PyObject *dict = Py_TYPE(obj)->tp_dict; return likely(dict) ? __PYX_GET_DICT_VERSION(dict) : 0; } static CYTHON_INLINE PY_UINT64_T __Pyx_get_object_dict_version(PyObject *obj) { PyObject **dictptr = NULL; Py_ssize_t offset = Py_TYPE(obj)->tp_dictoffset; if (offset) { #if CYTHON_COMPILING_IN_CPYTHON dictptr = (likely(offset > 0)) ? (PyObject **) ((char *)obj + offset) : _PyObject_GetDictPtr(obj); #else dictptr = _PyObject_GetDictPtr(obj); #endif } return (dictptr && *dictptr) ? 
__PYX_GET_DICT_VERSION(*dictptr) : 0; } static CYTHON_INLINE int __Pyx_object_dict_version_matches(PyObject* obj, PY_UINT64_T tp_dict_version, PY_UINT64_T obj_dict_version) { PyObject *dict = Py_TYPE(obj)->tp_dict; if (unlikely(!dict) || unlikely(tp_dict_version != __PYX_GET_DICT_VERSION(dict))) return 0; return obj_dict_version == __Pyx_get_object_dict_version(obj); } #endif /* PyErrFetchRestore */ #if CYTHON_FAST_THREAD_STATE static CYTHON_INLINE void __Pyx_ErrRestoreInState(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb) { PyObject *tmp_type, *tmp_value, *tmp_tb; tmp_type = tstate->curexc_type; tmp_value = tstate->curexc_value; tmp_tb = tstate->curexc_traceback; tstate->curexc_type = type; tstate->curexc_value = value; tstate->curexc_traceback = tb; Py_XDECREF(tmp_type); Py_XDECREF(tmp_value); Py_XDECREF(tmp_tb); } static CYTHON_INLINE void __Pyx_ErrFetchInState(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb) { *type = tstate->curexc_type; *value = tstate->curexc_value; *tb = tstate->curexc_traceback; tstate->curexc_type = 0; tstate->curexc_value = 0; tstate->curexc_traceback = 0; } #endif /* CLineInTraceback */ #ifndef CYTHON_CLINE_IN_TRACEBACK static int __Pyx_CLineForTraceback(CYTHON_NCP_UNUSED PyThreadState *tstate, int c_line) { PyObject *use_cline; PyObject *ptype, *pvalue, *ptraceback; #if CYTHON_COMPILING_IN_CPYTHON PyObject **cython_runtime_dict; #endif if (unlikely(!__pyx_cython_runtime)) { return c_line; } __Pyx_ErrFetchInState(tstate, &ptype, &pvalue, &ptraceback); #if CYTHON_COMPILING_IN_CPYTHON cython_runtime_dict = _PyObject_GetDictPtr(__pyx_cython_runtime); if (likely(cython_runtime_dict)) { __PYX_PY_DICT_LOOKUP_IF_MODIFIED( use_cline, *cython_runtime_dict, __Pyx_PyDict_GetItemStr(*cython_runtime_dict, __pyx_n_s_cline_in_traceback)) } else #endif { PyObject *use_cline_obj = __Pyx_PyObject_GetAttrStr(__pyx_cython_runtime, __pyx_n_s_cline_in_traceback); if (use_cline_obj) { use_cline = PyObject_Not(use_cline_obj) ? 
Py_False : Py_True; Py_DECREF(use_cline_obj); } else { PyErr_Clear(); use_cline = NULL; } } if (!use_cline) { c_line = 0; (void) PyObject_SetAttr(__pyx_cython_runtime, __pyx_n_s_cline_in_traceback, Py_False); } else if (use_cline == Py_False || (use_cline != Py_True && PyObject_Not(use_cline) != 0)) { c_line = 0; } __Pyx_ErrRestoreInState(tstate, ptype, pvalue, ptraceback); return c_line; } #endif /* CodeObjectCache */ static int __pyx_bisect_code_objects(__Pyx_CodeObjectCacheEntry* entries, int count, int code_line) { int start = 0, mid = 0, end = count - 1; if (end >= 0 && code_line > entries[end].code_line) { return count; } while (start < end) { mid = start + (end - start) / 2; if (code_line < entries[mid].code_line) { end = mid; } else if (code_line > entries[mid].code_line) { start = mid + 1; } else { return mid; } } if (code_line <= entries[mid].code_line) { return mid; } else { return mid + 1; } } static PyCodeObject *__pyx_find_code_object(int code_line) { PyCodeObject* code_object; int pos; if (unlikely(!code_line) || unlikely(!__pyx_code_cache.entries)) { return NULL; } pos = __pyx_bisect_code_objects(__pyx_code_cache.entries, __pyx_code_cache.count, code_line); if (unlikely(pos >= __pyx_code_cache.count) || unlikely(__pyx_code_cache.entries[pos].code_line != code_line)) { return NULL; } code_object = __pyx_code_cache.entries[pos].code_object; Py_INCREF(code_object); return code_object; } static void __pyx_insert_code_object(int code_line, PyCodeObject* code_object) { int pos, i; __Pyx_CodeObjectCacheEntry* entries = __pyx_code_cache.entries; if (unlikely(!code_line)) { return; } if (unlikely(!entries)) { entries = (__Pyx_CodeObjectCacheEntry*)PyMem_Malloc(64*sizeof(__Pyx_CodeObjectCacheEntry)); if (likely(entries)) { __pyx_code_cache.entries = entries; __pyx_code_cache.max_count = 64; __pyx_code_cache.count = 1; entries[0].code_line = code_line; entries[0].code_object = code_object; Py_INCREF(code_object); } return; } pos = __pyx_bisect_code_objects(__pyx_code_cache.entries, __pyx_code_cache.count, code_line); if ((pos < __pyx_code_cache.count) && unlikely(__pyx_code_cache.entries[pos].code_line == code_line)) { PyCodeObject* tmp = entries[pos].code_object; entries[pos].code_object = code_object; Py_DECREF(tmp); return; } if (__pyx_code_cache.count == __pyx_code_cache.max_count) { int new_max = __pyx_code_cache.max_count + 64; entries = (__Pyx_CodeObjectCacheEntry*)PyMem_Realloc( __pyx_code_cache.entries, ((size_t)new_max) * sizeof(__Pyx_CodeObjectCacheEntry)); if (unlikely(!entries)) { return; } __pyx_code_cache.entries = entries; __pyx_code_cache.max_count = new_max; } for (i=__pyx_code_cache.count; i>pos; i--) { entries[i] = entries[i-1]; } entries[pos].code_line = code_line; entries[pos].code_object = code_object; __pyx_code_cache.count++; Py_INCREF(code_object); } /* AddTraceback */ #include "compile.h" #include "frameobject.h" #include "traceback.h" #if PY_VERSION_HEX >= 0x030b00a6 #ifndef Py_BUILD_CORE #define Py_BUILD_CORE 1 #endif #include "internal/pycore_frame.h" #endif static PyCodeObject* __Pyx_CreateCodeObjectForTraceback( const char *funcname, int c_line, int py_line, const char *filename) { PyCodeObject *py_code = NULL; PyObject *py_funcname = NULL; #if PY_MAJOR_VERSION < 3 PyObject *py_srcfile = NULL; py_srcfile = PyString_FromString(filename); if (!py_srcfile) goto bad; #endif if (c_line) { #if PY_MAJOR_VERSION < 3 py_funcname = PyString_FromFormat( "%s (%s:%d)", funcname, __pyx_cfilenm, c_line); if (!py_funcname) goto bad; #else py_funcname = 
PyUnicode_FromFormat( "%s (%s:%d)", funcname, __pyx_cfilenm, c_line); if (!py_funcname) goto bad; funcname = PyUnicode_AsUTF8(py_funcname); if (!funcname) goto bad; #endif } else { #if PY_MAJOR_VERSION < 3 py_funcname = PyString_FromString(funcname); if (!py_funcname) goto bad; #endif } #if PY_MAJOR_VERSION < 3 py_code = __Pyx_PyCode_New( 0, 0, 0, 0, 0, __pyx_empty_bytes, /*PyObject *code,*/ __pyx_empty_tuple, /*PyObject *consts,*/ __pyx_empty_tuple, /*PyObject *names,*/ __pyx_empty_tuple, /*PyObject *varnames,*/ __pyx_empty_tuple, /*PyObject *freevars,*/ __pyx_empty_tuple, /*PyObject *cellvars,*/ py_srcfile, /*PyObject *filename,*/ py_funcname, /*PyObject *name,*/ py_line, __pyx_empty_bytes /*PyObject *lnotab*/ ); Py_DECREF(py_srcfile); #else py_code = PyCode_NewEmpty(filename, funcname, py_line); #endif Py_XDECREF(py_funcname); // XDECREF since it's only set on Py3 if cline return py_code; bad: Py_XDECREF(py_funcname); #if PY_MAJOR_VERSION < 3 Py_XDECREF(py_srcfile); #endif return NULL; } static void __Pyx_AddTraceback(const char *funcname, int c_line, int py_line, const char *filename) { PyCodeObject *py_code = 0; PyFrameObject *py_frame = 0; PyThreadState *tstate = __Pyx_PyThreadState_Current; PyObject *ptype, *pvalue, *ptraceback; if (c_line) { c_line = __Pyx_CLineForTraceback(tstate, c_line); } py_code = __pyx_find_code_object(c_line ? -c_line : py_line); if (!py_code) { __Pyx_ErrFetchInState(tstate, &ptype, &pvalue, &ptraceback); py_code = __Pyx_CreateCodeObjectForTraceback( funcname, c_line, py_line, filename); if (!py_code) { /* If the code object creation fails, then we should clear the fetched exception references and propagate the new exception */ Py_XDECREF(ptype); Py_XDECREF(pvalue); Py_XDECREF(ptraceback); goto bad; } __Pyx_ErrRestoreInState(tstate, ptype, pvalue, ptraceback); __pyx_insert_code_object(c_line ? 
-c_line : py_line, py_code); } py_frame = PyFrame_New( tstate, /*PyThreadState *tstate,*/ py_code, /*PyCodeObject *code,*/ __pyx_d, /*PyObject *globals,*/ 0 /*PyObject *locals*/ ); if (!py_frame) goto bad; __Pyx_PyFrame_SetLineNumber(py_frame, py_line); PyTraceBack_Here(py_frame); bad: Py_XDECREF(py_code); Py_XDECREF(py_frame); } /* CIntToPy */ static CYTHON_INLINE PyObject* __Pyx_PyInt_From_long(long value) { #ifdef __Pyx_HAS_GCC_DIAGNOSTIC #pragma GCC diagnostic push #pragma GCC diagnostic ignored "-Wconversion" #endif const long neg_one = (long) -1, const_zero = (long) 0; #ifdef __Pyx_HAS_GCC_DIAGNOSTIC #pragma GCC diagnostic pop #endif const int is_unsigned = neg_one > const_zero; if (is_unsigned) { if (sizeof(long) < sizeof(long)) { return PyInt_FromLong((long) value); } else if (sizeof(long) <= sizeof(unsigned long)) { return PyLong_FromUnsignedLong((unsigned long) value); #ifdef HAVE_LONG_LONG } else if (sizeof(long) <= sizeof(unsigned PY_LONG_LONG)) { return PyLong_FromUnsignedLongLong((unsigned PY_LONG_LONG) value); #endif } } else { if (sizeof(long) <= sizeof(long)) { return PyInt_FromLong((long) value); #ifdef HAVE_LONG_LONG } else if (sizeof(long) <= sizeof(PY_LONG_LONG)) { return PyLong_FromLongLong((PY_LONG_LONG) value); #endif } } { int one = 1; int little = (int)*(unsigned char *)&one; unsigned char *bytes = (unsigned char *)&value; return _PyLong_FromByteArray(bytes, sizeof(long), little, !is_unsigned); } } /* CIntFromPyVerify */ #define __PYX_VERIFY_RETURN_INT(target_type, func_type, func_value)\ __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, 0) #define __PYX_VERIFY_RETURN_INT_EXC(target_type, func_type, func_value)\ __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, 1) #define __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, exc)\ {\ func_type value = func_value;\ if (sizeof(target_type) < sizeof(func_type)) {\ if (unlikely(value != (func_type) (target_type) value)) {\ func_type zero = 0;\ if (exc && unlikely(value == (func_type)-1 && PyErr_Occurred()))\ return (target_type) -1;\ if (is_unsigned && unlikely(value < zero))\ goto raise_neg_overflow;\ else\ goto raise_overflow;\ }\ }\ return (target_type) value;\ } /* CIntFromPy */ static CYTHON_INLINE long __Pyx_PyInt_As_long(PyObject *x) { #ifdef __Pyx_HAS_GCC_DIAGNOSTIC #pragma GCC diagnostic push #pragma GCC diagnostic ignored "-Wconversion" #endif const long neg_one = (long) -1, const_zero = (long) 0; #ifdef __Pyx_HAS_GCC_DIAGNOSTIC #pragma GCC diagnostic pop #endif const int is_unsigned = neg_one > const_zero; #if PY_MAJOR_VERSION < 3 if (likely(PyInt_Check(x))) { if (sizeof(long) < sizeof(long)) { __PYX_VERIFY_RETURN_INT(long, long, PyInt_AS_LONG(x)) } else { long val = PyInt_AS_LONG(x); if (is_unsigned && unlikely(val < 0)) { goto raise_neg_overflow; } return (long) val; } } else #endif if (likely(PyLong_Check(x))) { if (is_unsigned) { #if CYTHON_USE_PYLONG_INTERNALS const digit* digits = ((PyLongObject*)x)->ob_digit; switch (Py_SIZE(x)) { case 0: return (long) 0; case 1: __PYX_VERIFY_RETURN_INT(long, digit, digits[0]) case 2: if (8 * sizeof(long) > 1 * PyLong_SHIFT) { if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if (8 * sizeof(long) >= 2 * PyLong_SHIFT) { return (long) (((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0])); } } break; case 3: if (8 * sizeof(long) > 2 * PyLong_SHIFT) { if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { 
__PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if (8 * sizeof(long) >= 3 * PyLong_SHIFT) { return (long) (((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0])); } } break; case 4: if (8 * sizeof(long) > 3 * PyLong_SHIFT) { if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if (8 * sizeof(long) >= 4 * PyLong_SHIFT) { return (long) (((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0])); } } break; } #endif #if CYTHON_COMPILING_IN_CPYTHON if (unlikely(Py_SIZE(x) < 0)) { goto raise_neg_overflow; } #else { int result = PyObject_RichCompareBool(x, Py_False, Py_LT); if (unlikely(result < 0)) return (long) -1; if (unlikely(result == 1)) goto raise_neg_overflow; } #endif if (sizeof(long) <= sizeof(unsigned long)) { __PYX_VERIFY_RETURN_INT_EXC(long, unsigned long, PyLong_AsUnsignedLong(x)) #ifdef HAVE_LONG_LONG } else if (sizeof(long) <= sizeof(unsigned PY_LONG_LONG)) { __PYX_VERIFY_RETURN_INT_EXC(long, unsigned PY_LONG_LONG, PyLong_AsUnsignedLongLong(x)) #endif } } else { #if CYTHON_USE_PYLONG_INTERNALS const digit* digits = ((PyLongObject*)x)->ob_digit; switch (Py_SIZE(x)) { case 0: return (long) 0; case -1: __PYX_VERIFY_RETURN_INT(long, sdigit, (sdigit) (-(sdigit)digits[0])) case 1: __PYX_VERIFY_RETURN_INT(long, digit, +digits[0]) case -2: if (8 * sizeof(long) - 1 > 1 * PyLong_SHIFT) { if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if (8 * sizeof(long) - 1 > 2 * PyLong_SHIFT) { return (long) (((long)-1)*(((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); } } break; case 2: if (8 * sizeof(long) > 1 * PyLong_SHIFT) { if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if (8 * sizeof(long) - 1 > 2 * PyLong_SHIFT) { return (long) ((((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); } } break; case -3: if (8 * sizeof(long) - 1 > 2 * PyLong_SHIFT) { if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if (8 * sizeof(long) - 1 > 3 * PyLong_SHIFT) { return (long) (((long)-1)*(((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); } } break; case 3: if (8 * sizeof(long) > 2 * PyLong_SHIFT) { if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if (8 * sizeof(long) - 1 > 3 * PyLong_SHIFT) { return (long) ((((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); } } break; case -4: if (8 * sizeof(long) - 1 > 3 * PyLong_SHIFT) { if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((((((unsigned 
long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if (8 * sizeof(long) - 1 > 4 * PyLong_SHIFT) { return (long) (((long)-1)*(((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); } } break; case 4: if (8 * sizeof(long) > 3 * PyLong_SHIFT) { if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if (8 * sizeof(long) - 1 > 4 * PyLong_SHIFT) { return (long) ((((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); } } break; } #endif if (sizeof(long) <= sizeof(long)) { __PYX_VERIFY_RETURN_INT_EXC(long, long, PyLong_AsLong(x)) #ifdef HAVE_LONG_LONG } else if (sizeof(long) <= sizeof(PY_LONG_LONG)) { __PYX_VERIFY_RETURN_INT_EXC(long, PY_LONG_LONG, PyLong_AsLongLong(x)) #endif } } { #if CYTHON_COMPILING_IN_PYPY && !defined(_PyLong_AsByteArray) PyErr_SetString(PyExc_RuntimeError, "_PyLong_AsByteArray() not available in PyPy, cannot convert large numbers"); #else long val; PyObject *v = __Pyx_PyNumber_IntOrLong(x); #if PY_MAJOR_VERSION < 3 if (likely(v) && !PyLong_Check(v)) { PyObject *tmp = v; v = PyNumber_Long(tmp); Py_DECREF(tmp); } #endif if (likely(v)) { int one = 1; int is_little = (int)*(unsigned char *)&one; unsigned char *bytes = (unsigned char *)&val; int ret = _PyLong_AsByteArray((PyLongObject *)v, bytes, sizeof(val), is_little, !is_unsigned); Py_DECREF(v); if (likely(!ret)) return val; } #endif return (long) -1; } } else { long val; PyObject *tmp = __Pyx_PyNumber_IntOrLong(x); if (!tmp) return (long) -1; val = __Pyx_PyInt_As_long(tmp); Py_DECREF(tmp); return val; } raise_overflow: PyErr_SetString(PyExc_OverflowError, "value too large to convert to long"); return (long) -1; raise_neg_overflow: PyErr_SetString(PyExc_OverflowError, "can't convert negative value to long"); return (long) -1; } /* CIntFromPy */ static CYTHON_INLINE int __Pyx_PyInt_As_int(PyObject *x) { #ifdef __Pyx_HAS_GCC_DIAGNOSTIC #pragma GCC diagnostic push #pragma GCC diagnostic ignored "-Wconversion" #endif const int neg_one = (int) -1, const_zero = (int) 0; #ifdef __Pyx_HAS_GCC_DIAGNOSTIC #pragma GCC diagnostic pop #endif const int is_unsigned = neg_one > const_zero; #if PY_MAJOR_VERSION < 3 if (likely(PyInt_Check(x))) { if (sizeof(int) < sizeof(long)) { __PYX_VERIFY_RETURN_INT(int, long, PyInt_AS_LONG(x)) } else { long val = PyInt_AS_LONG(x); if (is_unsigned && unlikely(val < 0)) { goto raise_neg_overflow; } return (int) val; } } else #endif if (likely(PyLong_Check(x))) { if (is_unsigned) { #if CYTHON_USE_PYLONG_INTERNALS const digit* digits = ((PyLongObject*)x)->ob_digit; switch (Py_SIZE(x)) { case 0: return (int) 0; case 1: __PYX_VERIFY_RETURN_INT(int, digit, digits[0]) case 2: if (8 * sizeof(int) > 1 * PyLong_SHIFT) { if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if (8 * sizeof(int) >= 2 * PyLong_SHIFT) { return (int) (((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0])); } } break; case 3: if (8 * sizeof(int) > 2 * PyLong_SHIFT) { if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { 
__PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if (8 * sizeof(int) >= 3 * PyLong_SHIFT) { return (int) (((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0])); } } break; case 4: if (8 * sizeof(int) > 3 * PyLong_SHIFT) { if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if (8 * sizeof(int) >= 4 * PyLong_SHIFT) { return (int) (((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0])); } } break; } #endif #if CYTHON_COMPILING_IN_CPYTHON if (unlikely(Py_SIZE(x) < 0)) { goto raise_neg_overflow; } #else { int result = PyObject_RichCompareBool(x, Py_False, Py_LT); if (unlikely(result < 0)) return (int) -1; if (unlikely(result == 1)) goto raise_neg_overflow; } #endif if (sizeof(int) <= sizeof(unsigned long)) { __PYX_VERIFY_RETURN_INT_EXC(int, unsigned long, PyLong_AsUnsignedLong(x)) #ifdef HAVE_LONG_LONG } else if (sizeof(int) <= sizeof(unsigned PY_LONG_LONG)) { __PYX_VERIFY_RETURN_INT_EXC(int, unsigned PY_LONG_LONG, PyLong_AsUnsignedLongLong(x)) #endif } } else { #if CYTHON_USE_PYLONG_INTERNALS const digit* digits = ((PyLongObject*)x)->ob_digit; switch (Py_SIZE(x)) { case 0: return (int) 0; case -1: __PYX_VERIFY_RETURN_INT(int, sdigit, (sdigit) (-(sdigit)digits[0])) case 1: __PYX_VERIFY_RETURN_INT(int, digit, +digits[0]) case -2: if (8 * sizeof(int) - 1 > 1 * PyLong_SHIFT) { if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if (8 * sizeof(int) - 1 > 2 * PyLong_SHIFT) { return (int) (((int)-1)*(((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); } } break; case 2: if (8 * sizeof(int) > 1 * PyLong_SHIFT) { if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if (8 * sizeof(int) - 1 > 2 * PyLong_SHIFT) { return (int) ((((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); } } break; case -3: if (8 * sizeof(int) - 1 > 2 * PyLong_SHIFT) { if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if (8 * sizeof(int) - 1 > 3 * PyLong_SHIFT) { return (int) (((int)-1)*(((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); } } break; case 3: if (8 * sizeof(int) > 2 * PyLong_SHIFT) { if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if (8 * sizeof(int) - 1 > 3 * PyLong_SHIFT) { return (int) ((((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); } } break; case -4: if (8 * sizeof(int) - 1 > 3 * PyLong_SHIFT) { if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned 
long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if (8 * sizeof(int) - 1 > 4 * PyLong_SHIFT) { return (int) (((int)-1)*(((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); } } break; case 4: if (8 * sizeof(int) > 3 * PyLong_SHIFT) { if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if (8 * sizeof(int) - 1 > 4 * PyLong_SHIFT) { return (int) ((((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); } } break; } #endif if (sizeof(int) <= sizeof(long)) { __PYX_VERIFY_RETURN_INT_EXC(int, long, PyLong_AsLong(x)) #ifdef HAVE_LONG_LONG } else if (sizeof(int) <= sizeof(PY_LONG_LONG)) { __PYX_VERIFY_RETURN_INT_EXC(int, PY_LONG_LONG, PyLong_AsLongLong(x)) #endif } } { #if CYTHON_COMPILING_IN_PYPY && !defined(_PyLong_AsByteArray) PyErr_SetString(PyExc_RuntimeError, "_PyLong_AsByteArray() not available in PyPy, cannot convert large numbers"); #else int val; PyObject *v = __Pyx_PyNumber_IntOrLong(x); #if PY_MAJOR_VERSION < 3 if (likely(v) && !PyLong_Check(v)) { PyObject *tmp = v; v = PyNumber_Long(tmp); Py_DECREF(tmp); } #endif if (likely(v)) { int one = 1; int is_little = (int)*(unsigned char *)&one; unsigned char *bytes = (unsigned char *)&val; int ret = _PyLong_AsByteArray((PyLongObject *)v, bytes, sizeof(val), is_little, !is_unsigned); Py_DECREF(v); if (likely(!ret)) return val; } #endif return (int) -1; } } else { int val; PyObject *tmp = __Pyx_PyNumber_IntOrLong(x); if (!tmp) return (int) -1; val = __Pyx_PyInt_As_int(tmp); Py_DECREF(tmp); return val; } raise_overflow: PyErr_SetString(PyExc_OverflowError, "value too large to convert to int"); return (int) -1; raise_neg_overflow: PyErr_SetString(PyExc_OverflowError, "can't convert negative value to int"); return (int) -1; } /* FastTypeChecks */ #if CYTHON_COMPILING_IN_CPYTHON static int __Pyx_InBases(PyTypeObject *a, PyTypeObject *b) { while (a) { a = a->tp_base; if (a == b) return 1; } return b == &PyBaseObject_Type; } static CYTHON_INLINE int __Pyx_IsSubtype(PyTypeObject *a, PyTypeObject *b) { PyObject *mro; if (a == b) return 1; mro = a->tp_mro; if (likely(mro)) { Py_ssize_t i, n; n = PyTuple_GET_SIZE(mro); for (i = 0; i < n; i++) { if (PyTuple_GET_ITEM(mro, i) == (PyObject *)b) return 1; } return 0; } return __Pyx_InBases(a, b); } #if PY_MAJOR_VERSION == 2 static int __Pyx_inner_PyErr_GivenExceptionMatches2(PyObject *err, PyObject* exc_type1, PyObject* exc_type2) { PyObject *exception, *value, *tb; int res; __Pyx_PyThreadState_declare __Pyx_PyThreadState_assign __Pyx_ErrFetch(&exception, &value, &tb); res = exc_type1 ? PyObject_IsSubclass(err, exc_type1) : 0; if (unlikely(res == -1)) { PyErr_WriteUnraisable(err); res = 0; } if (!res) { res = PyObject_IsSubclass(err, exc_type2); if (unlikely(res == -1)) { PyErr_WriteUnraisable(err); res = 0; } } __Pyx_ErrRestore(exception, value, tb); return res; } #else static CYTHON_INLINE int __Pyx_inner_PyErr_GivenExceptionMatches2(PyObject *err, PyObject* exc_type1, PyObject *exc_type2) { int res = exc_type1 ? 
__Pyx_IsSubtype((PyTypeObject*)err, (PyTypeObject*)exc_type1) : 0; if (!res) { res = __Pyx_IsSubtype((PyTypeObject*)err, (PyTypeObject*)exc_type2); } return res; } #endif static int __Pyx_PyErr_GivenExceptionMatchesTuple(PyObject *exc_type, PyObject *tuple) { Py_ssize_t i, n; assert(PyExceptionClass_Check(exc_type)); n = PyTuple_GET_SIZE(tuple); #if PY_MAJOR_VERSION >= 3 for (i=0; i '9'); break; } if (rt_from_call[i] != ctversion[i]) { same = 0; break; } } if (!same) { char rtversion[5] = {'\0'}; char message[200]; for (i=0; i<4; ++i) { if (rt_from_call[i] == '.') { if (found_dot) break; found_dot = 1; } else if (rt_from_call[i] < '0' || rt_from_call[i] > '9') { break; } rtversion[i] = rt_from_call[i]; } PyOS_snprintf(message, sizeof(message), "compiletime version %s of module '%.100s' " "does not match runtime version %s", ctversion, __Pyx_MODULE_NAME, rtversion); return PyErr_WarnEx(NULL, message, 1); } return 0; } /* InitStrings */ static int __Pyx_InitStrings(__Pyx_StringTabEntry *t) { while (t->p) { #if PY_MAJOR_VERSION < 3 if (t->is_unicode) { *t->p = PyUnicode_DecodeUTF8(t->s, t->n - 1, NULL); } else if (t->intern) { *t->p = PyString_InternFromString(t->s); } else { *t->p = PyString_FromStringAndSize(t->s, t->n - 1); } #else if (t->is_unicode | t->is_str) { if (t->intern) { *t->p = PyUnicode_InternFromString(t->s); } else if (t->encoding) { *t->p = PyUnicode_Decode(t->s, t->n - 1, t->encoding, NULL); } else { *t->p = PyUnicode_FromStringAndSize(t->s, t->n - 1); } } else { *t->p = PyBytes_FromStringAndSize(t->s, t->n - 1); } #endif if (!*t->p) return -1; if (PyObject_Hash(*t->p) == -1) return -1; ++t; } return 0; } static CYTHON_INLINE PyObject* __Pyx_PyUnicode_FromString(const char* c_str) { return __Pyx_PyUnicode_FromStringAndSize(c_str, (Py_ssize_t)strlen(c_str)); } static CYTHON_INLINE const char* __Pyx_PyObject_AsString(PyObject* o) { Py_ssize_t ignore; return __Pyx_PyObject_AsStringAndSize(o, &ignore); } #if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT #if !CYTHON_PEP393_ENABLED static const char* __Pyx_PyUnicode_AsStringAndSize(PyObject* o, Py_ssize_t *length) { char* defenc_c; PyObject* defenc = _PyUnicode_AsDefaultEncodedString(o, NULL); if (!defenc) return NULL; defenc_c = PyBytes_AS_STRING(defenc); #if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII { char* end = defenc_c + PyBytes_GET_SIZE(defenc); char* c; for (c = defenc_c; c < end; c++) { if ((unsigned char) (*c) >= 128) { PyUnicode_AsASCIIString(o); return NULL; } } } #endif *length = PyBytes_GET_SIZE(defenc); return defenc_c; } #else static CYTHON_INLINE const char* __Pyx_PyUnicode_AsStringAndSize(PyObject* o, Py_ssize_t *length) { if (unlikely(__Pyx_PyUnicode_READY(o) == -1)) return NULL; #if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII if (likely(PyUnicode_IS_ASCII(o))) { *length = PyUnicode_GET_LENGTH(o); return PyUnicode_AsUTF8(o); } else { PyUnicode_AsASCIIString(o); return NULL; } #else return PyUnicode_AsUTF8AndSize(o, length); #endif } #endif #endif static CYTHON_INLINE const char* __Pyx_PyObject_AsStringAndSize(PyObject* o, Py_ssize_t *length) { #if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT if ( #if PY_MAJOR_VERSION < 3 && __PYX_DEFAULT_STRING_ENCODING_IS_ASCII __Pyx_sys_getdefaultencoding_not_ascii && #endif PyUnicode_Check(o)) { return __Pyx_PyUnicode_AsStringAndSize(o, length); } else #endif #if (!CYTHON_COMPILING_IN_PYPY) || (defined(PyByteArray_AS_STRING) && defined(PyByteArray_GET_SIZE)) if (PyByteArray_Check(o)) { *length = 
PyByteArray_GET_SIZE(o); return PyByteArray_AS_STRING(o); } else #endif { char* result; int r = PyBytes_AsStringAndSize(o, &result, length); if (unlikely(r < 0)) { return NULL; } else { return result; } } } static CYTHON_INLINE int __Pyx_PyObject_IsTrue(PyObject* x) { int is_true = x == Py_True; if (is_true | (x == Py_False) | (x == Py_None)) return is_true; else return PyObject_IsTrue(x); } static CYTHON_INLINE int __Pyx_PyObject_IsTrueAndDecref(PyObject* x) { int retval; if (unlikely(!x)) return -1; retval = __Pyx_PyObject_IsTrue(x); Py_DECREF(x); return retval; } static PyObject* __Pyx_PyNumber_IntOrLongWrongResultType(PyObject* result, const char* type_name) { #if PY_MAJOR_VERSION >= 3 if (PyLong_Check(result)) { if (PyErr_WarnFormat(PyExc_DeprecationWarning, 1, "__int__ returned non-int (type %.200s). " "The ability to return an instance of a strict subclass of int " "is deprecated, and may be removed in a future version of Python.", Py_TYPE(result)->tp_name)) { Py_DECREF(result); return NULL; } return result; } #endif PyErr_Format(PyExc_TypeError, "__%.4s__ returned non-%.4s (type %.200s)", type_name, type_name, Py_TYPE(result)->tp_name); Py_DECREF(result); return NULL; } static CYTHON_INLINE PyObject* __Pyx_PyNumber_IntOrLong(PyObject* x) { #if CYTHON_USE_TYPE_SLOTS PyNumberMethods *m; #endif const char *name = NULL; PyObject *res = NULL; #if PY_MAJOR_VERSION < 3 if (likely(PyInt_Check(x) || PyLong_Check(x))) #else if (likely(PyLong_Check(x))) #endif return __Pyx_NewRef(x); #if CYTHON_USE_TYPE_SLOTS m = Py_TYPE(x)->tp_as_number; #if PY_MAJOR_VERSION < 3 if (m && m->nb_int) { name = "int"; res = m->nb_int(x); } else if (m && m->nb_long) { name = "long"; res = m->nb_long(x); } #else if (likely(m && m->nb_int)) { name = "int"; res = m->nb_int(x); } #endif #else if (!PyBytes_CheckExact(x) && !PyUnicode_CheckExact(x)) { res = PyNumber_Int(x); } #endif if (likely(res)) { #if PY_MAJOR_VERSION < 3 if (unlikely(!PyInt_Check(res) && !PyLong_Check(res))) { #else if (unlikely(!PyLong_CheckExact(res))) { #endif return __Pyx_PyNumber_IntOrLongWrongResultType(res, name); } } else if (!PyErr_Occurred()) { PyErr_SetString(PyExc_TypeError, "an integer is required"); } return res; } static CYTHON_INLINE Py_ssize_t __Pyx_PyIndex_AsSsize_t(PyObject* b) { Py_ssize_t ival; PyObject *x; #if PY_MAJOR_VERSION < 3 if (likely(PyInt_CheckExact(b))) { if (sizeof(Py_ssize_t) >= sizeof(long)) return PyInt_AS_LONG(b); else return PyInt_AsSsize_t(b); } #endif if (likely(PyLong_CheckExact(b))) { #if CYTHON_USE_PYLONG_INTERNALS const digit* digits = ((PyLongObject*)b)->ob_digit; const Py_ssize_t size = Py_SIZE(b); if (likely(__Pyx_sst_abs(size) <= 1)) { ival = likely(size) ? 
digits[0] : 0; if (size == -1) ival = -ival; return ival; } else { switch (size) { case 2: if (8 * sizeof(Py_ssize_t) > 2 * PyLong_SHIFT) { return (Py_ssize_t) (((((size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); } break; case -2: if (8 * sizeof(Py_ssize_t) > 2 * PyLong_SHIFT) { return -(Py_ssize_t) (((((size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); } break; case 3: if (8 * sizeof(Py_ssize_t) > 3 * PyLong_SHIFT) { return (Py_ssize_t) (((((((size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); } break; case -3: if (8 * sizeof(Py_ssize_t) > 3 * PyLong_SHIFT) { return -(Py_ssize_t) (((((((size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); } break; case 4: if (8 * sizeof(Py_ssize_t) > 4 * PyLong_SHIFT) { return (Py_ssize_t) (((((((((size_t)digits[3]) << PyLong_SHIFT) | (size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); } break; case -4: if (8 * sizeof(Py_ssize_t) > 4 * PyLong_SHIFT) { return -(Py_ssize_t) (((((((((size_t)digits[3]) << PyLong_SHIFT) | (size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); } break; } } #endif return PyLong_AsSsize_t(b); } x = PyNumber_Index(b); if (!x) return -1; ival = PyInt_AsSsize_t(x); Py_DECREF(x); return ival; } static CYTHON_INLINE Py_hash_t __Pyx_PyIndex_AsHash_t(PyObject* o) { if (sizeof(Py_hash_t) == sizeof(Py_ssize_t)) { return (Py_hash_t) __Pyx_PyIndex_AsSsize_t(o); #if PY_MAJOR_VERSION < 3 } else if (likely(PyInt_CheckExact(o))) { return PyInt_AS_LONG(o); #endif } else { Py_ssize_t ival; PyObject *x; x = PyNumber_Index(o); if (!x) return -1; ival = PyInt_AsLong(x); Py_DECREF(x); return ival; } } static CYTHON_INLINE PyObject * __Pyx_PyBool_FromLong(long b) { return b ? 
__Pyx_NewRef(Py_True) : __Pyx_NewRef(Py_False); } static CYTHON_INLINE PyObject * __Pyx_PyInt_FromSize_t(size_t ival) { return PyInt_FromSize_t(ival); } #endif /* Py_PYTHON_H */ python-versioneer-0.29/test/demoappext-setuptools/demo/ext.pyx000066400000000000000000000001161445202303300247700ustar00rootroot00000000000000from __future__ import print_function def run(): print("Extension code") python-versioneer-0.29/test/demoappext-setuptools/demo/main.py000066400000000000000000000005711445202303300247310ustar00rootroot00000000000000#!/usr/bin/env python import demo from demo import _version from demolib import __version__ as libversion def run(*args, **kwargs): print("__version__:%s" % demo.__version__) print("_version:%s" % str(_version)) versions = _version.get_versions() for k in sorted(versions.keys()): print("%s:%s" % (k,versions[k])) print("demolib:%s" % libversion) python-versioneer-0.29/test/demoappext-setuptools/setup.cfg000066400000000000000000000002331445202303300243230ustar00rootroot00000000000000 [versioneer] VCS = git versionfile_source = demo/_version.py versionfile_build = demo/_version.py tag_prefix = demoappext- parentdir_prefix = demoappext- python-versioneer-0.29/test/demoappext-setuptools/setup.py000066400000000000000000000011601445202303300242140ustar00rootroot00000000000000 from setuptools import setup, Extension import versioneer commands = versioneer.get_cmdclass().copy() extension = Extension('demo.ext', sources=['demo/ext.c'], ) setup(name="demoappext", version=versioneer.get_version(), description="Demo", url="url", author="author", author_email="email", zip_safe=True, packages=["demo"], # package_dir={"": "src"}, entry_points={ 'console_scripts': [ 'rundemo = demo.main:run' ], }, install_requires=["demolib==1.0"], cmdclass=commands, ext_modules=[extension], ) python-versioneer-0.29/test/demolib/000077500000000000000000000000001445202303300175325ustar00rootroot00000000000000python-versioneer-0.29/test/demolib/MANIFEST.in000066400000000000000000000000001445202303300212560ustar00rootroot00000000000000python-versioneer-0.29/test/demolib/README000066400000000000000000000000101445202303300204010ustar00rootroot00000000000000read me python-versioneer-0.29/test/demolib/setup.cfg000066400000000000000000000002371445202303300213550ustar00rootroot00000000000000 [versioneer] VCS = git versionfile_source = src/demolib/_version.py versionfile_build = demolib/_version.py tag_prefix = demolib- parentdir_prefix = demolib- python-versioneer-0.29/test/demolib/setup.py000066400000000000000000000005521445202303300212460ustar00rootroot00000000000000 from setuptools import setup import versioneer commands = versioneer.get_cmdclass().copy() setup(name="demolib", version=versioneer.get_version(), description="Demo", url="url", author="author", author_email="email", zip_safe=True, packages=["demolib"], package_dir={"": "src"}, cmdclass=commands, ) python-versioneer-0.29/test/demolib/src/000077500000000000000000000000001445202303300203215ustar00rootroot00000000000000python-versioneer-0.29/test/demolib/src/demolib/000077500000000000000000000000001445202303300217345ustar00rootroot00000000000000python-versioneer-0.29/test/demolib/src/demolib/__init__.py000066400000000000000000000000011445202303300240340ustar00rootroot00000000000000 python-versioneer-0.29/test/git/000077500000000000000000000000001445202303300167025ustar00rootroot00000000000000python-versioneer-0.29/test/git/common.py000066400000000000000000000034511445202303300205470ustar00rootroot00000000000000import os, sys import stat import 
shutil from subprocess_helper import run_command GITS = ["git"] if sys.platform == "win32": GITS = ["git.cmd", "git.exe"] class Common: def command(self, cmd, *args, **kwargs): workdir = kwargs.pop("workdir", self.projdir) assert not kwargs, kwargs.keys() output, rc = run_command([cmd], list(args), workdir, True) if output is None: self.fail("problem running command %s" % cmd) return output def git(self, *args, **kwargs): workdir = kwargs.pop("workdir", self.gitdir) assert not kwargs, kwargs.keys() env = os.environ.copy() env["EMAIL"] = "foo@example.com" env["GIT_AUTHOR_NAME"] = "foo" env["GIT_COMMITTER_NAME"] = "foo" output, rc = run_command(GITS, args=list(args), cwd=workdir, verbose=True, env=env) if output is None: self.fail("problem running git (workdir: %s)" % workdir) return output def python(self, *args, **kwargs): workdir = kwargs.pop("workdir", self.projdir) exe = kwargs.pop("python", sys.executable) assert not kwargs, kwargs.keys() output, rc = run_command([exe], list(args), workdir, True) if output is None: self.fail("problem running python (workdir: %s)" % workdir) return output def project_file(self, *path): return os.path.join(self.projdir, *path) def subpath(self, *path): return os.path.join(self.testdir, *path) def rmtree(self, path): # rm -rf # Found on https://stackoverflow.com/a/1889686 def remove_readonly(func, path, excinfo): os.chmod(path, stat.S_IWRITE) func(path) shutil.rmtree(path, onerror=remove_readonly) python-versioneer-0.29/test/git/test_git.py000066400000000000000000000770201445202303300211040ustar00rootroot00000000000000#! /usr/bin/python import os, sys import posixpath import shutil import tarfile import unittest import tempfile import re from unittest import mock from pathlib import Path from pkg_resources import parse_version sys.path.insert(0, "src") import common from render import render from git import from_vcs, from_keywords from subprocess_helper import run_command DEFAULT_PYPROJECT = """\ [build-system] requires = ["setuptools"] build-backend = "setuptools.build_meta" """ class ParseGitDescribe(unittest.TestCase): def setUp(self): self.fakeroot = tempfile.mkdtemp() self.fakegit = os.path.join(self.fakeroot, ".git") os.mkdir(self.fakegit) def test_pieces(self): def pv(git_describe, do_error=False, expect_pieces=False, branch_name="master"): def fake_run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, env=None): if args[0] == "describe": if do_error == "describe": return None, 0 return git_describe+"\n", 0 if args[0] == "rev-parse": if do_error == "rev-parse": return None, 0 if args[1] == "--abbrev-ref": return "%s\n" % branch_name, 0 else: return "longlong\n", 0 if args[0] == "rev-list": return ">hashhashhashhashhashhashhashhash\n" * 42, 0 if args[0] == "show": if do_error == "show": return "gpg: signature\n12345\n", 0 return "12345\n", 0 if args[0] == "branch": return "* (no branch)\n" \ " contained-branch-1\n" \ " contained-branch-2", 0 self.fail("git called in weird way: %s" % (args,)) return from_vcs.git_pieces_from_vcs( "v", self.fakeroot, verbose=False, runner=fake_run_command) self.assertRaises(from_vcs.NotThisMethod, pv, "ignored", do_error="describe") self.assertRaises(from_vcs.NotThisMethod, pv, "ignored", do_error="rev-parse") self.assertEqual(pv("1f"), {"closest-tag": None, "dirty": False, "error": None, "distance": 42, "long": "longlong", "short": "longlon", "date": "12345", "branch": "master"}) self.assertEqual(pv("1f", do_error="show"), {"closest-tag": None, "dirty": False, "error": None, "distance": 42, "long": 
"longlong", "short": "longlon", "date": "12345", "branch": "master"}) self.assertEqual(pv("1f-dirty"), {"closest-tag": None, "dirty": True, "error": None, "distance": 42, "long": "longlong", "short": "longlon", "date": "12345", "branch": "master"}) self.assertEqual(pv("v1.0-0-g1f"), {"closest-tag": "1.0", "dirty": False, "error": None, "distance": 0, "long": "longlong", "short": "1f", "date": "12345", "branch": "master"}) self.assertEqual(pv("v1.0-0-g1f-dirty"), {"closest-tag": "1.0", "dirty": True, "error": None, "distance": 0, "long": "longlong", "short": "1f", "date": "12345", "branch": "master"}) self.assertEqual(pv("v1.0-1-g1f"), {"closest-tag": "1.0", "dirty": False, "error": None, "distance": 1, "long": "longlong", "short": "1f", "date": "12345", "branch": "master"}) self.assertEqual(pv("v1.0-1-g1f-dirty"), {"closest-tag": "1.0", "dirty": True, "error": None, "distance": 1, "long": "longlong", "short": "1f", "date": "12345", "branch": "master"}) self.assertEqual(pv("v1.0-1-g1f-dirty", branch_name="feature-branch"), {"closest-tag": "1.0", "dirty": True, "error": None, "distance": 1, "long": "longlong", "short": "1f", "date": "12345", "branch": "feature-branch"}) self.assertEqual(pv("v1.0-1-g1f-dirty", branch_name="HEAD"), {"closest-tag": "1.0", "dirty": True, "error": None, "distance": 1, "long": "longlong", "short": "1f", "date": "12345", "branch": "contained-branch-1"}) def tearDown(self): os.rmdir(self.fakegit) os.rmdir(self.fakeroot) class Keywords(unittest.TestCase): def parse(self, refnames, full, prefix="", date=None): return from_keywords.git_versions_from_keywords( {"refnames": refnames, "full": full, "date": date}, prefix, False) def test_parse(self): v = self.parse(" (HEAD, 2.0,master , otherbranch ) ", " full ") self.assertEqual(v["version"], "2.0") self.assertEqual(v["full-revisionid"], "full") self.assertEqual(v["dirty"], False) self.assertEqual(v["error"], None) self.assertEqual(v["date"], None) def test_prefer_short(self): v = self.parse(" (HEAD, 2.0rc1, 2.0, 2.0rc2) ", " full ") self.assertEqual(v["version"], "2.0") self.assertEqual(v["full-revisionid"], "full") self.assertEqual(v["dirty"], False) self.assertEqual(v["error"], None) self.assertEqual(v["date"], None) def test_prefix(self): v = self.parse(" (HEAD, projectname-2.0) ", " full ", "projectname-") self.assertEqual(v["version"], "2.0") self.assertEqual(v["full-revisionid"], "full") self.assertEqual(v["dirty"], False) self.assertEqual(v["error"], None) self.assertEqual(v["date"], None) def test_unexpanded(self): self.assertRaises(from_keywords.NotThisMethod, self.parse, " $Format$ ", " full ", "projectname-") def test_no_tags(self): v = self.parse("(HEAD, master)", "full") self.assertEqual(v["version"], "0+unknown") self.assertEqual(v["full-revisionid"], "full") self.assertEqual(v["dirty"], False) self.assertEqual(v["error"], "no suitable tags") self.assertEqual(v["date"], None) def test_no_prefix(self): v = self.parse("(HEAD, master, 1.23)", "full", "missingprefix-") self.assertEqual(v["version"], "0+unknown") self.assertEqual(v["full-revisionid"], "full") self.assertEqual(v["dirty"], False) self.assertEqual(v["error"], "no suitable tags") self.assertEqual(v["date"], None) def test_date(self): date = "2017-07-24 16:03:40 +0200" result = "2017-07-24T16:03:40+0200" v = self.parse(" (HEAD, 2.0,master , otherbranch ) ", " full ", date=date) self.assertEqual(v["date"], result) def test_date_gpg(self): date = """ gpg: Signature information gpg: ... 
2017-07-24 16:03:40 +0200""" result = "2017-07-24T16:03:40+0200" v = self.parse(" (HEAD, 2.0,master , otherbranch ) ", " full ", date=date) self.assertEqual(v["date"], result) expected_renders = """ closest-tag: 1.0 distance: 0 dirty: False branch: feature pep440: 1.0 pep440-branch: 1.0 pep440-pre: 1.0 pep440-post: 1.0 pep440-post-branch: 1.0 pep440-old: 1.0 git-describe: 1.0 git-describe-long: 1.0-0-g250b7ca closest-tag: 1.0 distance: 0 dirty: True branch: master pep440: 1.0+0.g250b7ca.dirty pep440-branch: 1.0+0.g250b7ca.dirty pep440-pre: 1.0 pep440-post: 1.0.post0.dev0+g250b7ca pep440-post-branch: 1.0.post0+g250b7ca.dirty pep440-old: 1.0.post0.dev0 git-describe: 1.0-dirty git-describe-long: 1.0-0-g250b7ca-dirty closest-tag: 1.0 distance: 1 dirty: False branch: master pep440: 1.0+1.g250b7ca pep440-branch: 1.0+1.g250b7ca pep440-pre: 1.0.post0.dev1 pep440-post: 1.0.post1+g250b7ca pep440-post-branch: 1.0.post1+g250b7ca pep440-old: 1.0.post1 git-describe: 1.0-1-g250b7ca git-describe-long: 1.0-1-g250b7ca closest-tag: 1.0 distance: 1 dirty: True branch: feature pep440: 1.0+1.g250b7ca.dirty pep440-branch: 1.0.dev0+1.g250b7ca.dirty pep440-pre: 1.0.post0.dev1 pep440-post: 1.0.post1.dev0+g250b7ca pep440-post-branch: 1.0.post1.dev0+g250b7ca.dirty pep440-old: 1.0.post1.dev0 git-describe: 1.0-1-g250b7ca-dirty git-describe-long: 1.0-1-g250b7ca-dirty closest-tag: 1.0+plus distance: 1 dirty: False branch: feature pep440: 1.0+plus.1.g250b7ca pep440-branch: 1.0+plus.dev0.1.g250b7ca pep440-pre: 1.0+plus.post0.dev1 pep440-post: 1.0+plus.post1.g250b7ca pep440-post-branch: 1.0+plus.post1.dev0.g250b7ca pep440-old: 1.0+plus.post1 git-describe: 1.0+plus-1-g250b7ca git-describe-long: 1.0+plus-1-g250b7ca closest-tag: 1.0+plus distance: 1 dirty: True branch: master pep440: 1.0+plus.1.g250b7ca.dirty pep440-branch: 1.0+plus.1.g250b7ca.dirty pep440-pre: 1.0+plus.post0.dev1 pep440-post: 1.0+plus.post1.dev0.g250b7ca pep440-post-branch: 1.0+plus.post1.g250b7ca.dirty pep440-old: 1.0+plus.post1.dev0 git-describe: 1.0+plus-1-g250b7ca-dirty git-describe-long: 1.0+plus-1-g250b7ca-dirty closest-tag: None distance: 1 dirty: False branch: master pep440: 0+untagged.1.g250b7ca pep440-branch: 0+untagged.1.g250b7ca pep440-pre: 0.post0.dev1 pep440-post: 0.post1+g250b7ca pep440-post-branch: 0.post1+g250b7ca pep440-old: 0.post1 git-describe: 250b7ca git-describe-long: 250b7ca closest-tag: None distance: 1 dirty: True branch: feature pep440: 0+untagged.1.g250b7ca.dirty pep440-branch: 0.dev0+untagged.1.g250b7ca.dirty pep440-pre: 0.post0.dev1 pep440-post: 0.post1.dev0+g250b7ca pep440-post-branch: 0.post1.dev0+g250b7ca.dirty pep440-old: 0.post1.dev0 git-describe: 250b7ca-dirty git-describe-long: 250b7ca-dirty """ class RenderPieces(unittest.TestCase): def do_render(self, pieces): out = {} for style in ["pep440", "pep440-branch", "pep440-pre", "pep440-post", "pep440-post-branch", "pep440-old", "git-describe", "git-describe-long"]: out[style] = render(pieces, style)["version"] DEFAULT = "pep440" self.assertEqual(render(pieces, ""), render(pieces, DEFAULT)) self.assertEqual(render(pieces, "default"), render(pieces, DEFAULT)) return out def parse_expected(self): base_pieces = {"long": "250b7ca731388d8f016db2e06ab1d6289486424b", "short": "250b7ca", "error": None} more_pieces = {} expected = {} for line in expected_renders.splitlines(): line = line.strip() if not line: if more_pieces and expected: pieces = base_pieces.copy() pieces.update(more_pieces) yield (pieces, expected) more_pieces = {} expected = {} continue name, value = line.split(":") 
name = name.strip() value = value.strip() if name == "distance": more_pieces["distance"] = int(value) elif name == "dirty": more_pieces["dirty"] = bool(value.lower() == "true") elif name == "closest-tag": more_pieces["closest-tag"] = value if value == "None": more_pieces["closest-tag"] = None elif name == "branch": more_pieces["branch"] = value else: expected[name] = value if more_pieces and expected: pieces = base_pieces.copy() pieces.update(more_pieces) yield (pieces, expected) def test_render(self): for (pieces, expected) in self.parse_expected(): got = self.do_render(pieces) for key in expected: self.assertEqual(got[key], expected[key], (pieces, key, got[key], expected[key])) VERBOSE = False class Repo(common.Common, unittest.TestCase): # There are six tree states we're interested in: # S1: sitting on the initial commit, no tags # S2: dirty tree after the initial commit # S3: sitting on the 1.0 tag # S4: dirtying the tree after 1.0 # S5: making a new commit after 1.0, clean tree # S6: dirtying the tree after the post-1.0 commit # # Then we're interested in 6 kinds of trees: # TA: source tree (with .git) # TB: source tree without .git (should get 'unknown') # TC: source tree without .git unpacked into prefixdir # TD: git-archive tarball # TE: unpacked sdist tarball (python setup.py sdist) # TF: unpacked sdist tarball (python -m build -s) # # In three runtime situations: # RA1: setup.py --version # RA2: ...path/to/setup.py --version (from outside the source tree) # RB: setup.py build; rundemo --version # # We can only detect dirty files in real git trees, so we don't examine # S2/S4/S6 for TB/TC/TD/TE, or RB. # note that the repo being manipulated is always named "demoapp", # regardless of which source directory we copied it from (test/demoapp/ # or test/demoapp-script-only/) def test_full(self): self.run_test("test/demoapp", False, ".", False) def test_script_only(self): # This test looks at an application that consists entirely of a # script: no libraries (so its setup.py has packages=[]). This sort # of app cannot be run from source: you must 'setup.py build' to get # anything executable. So of the 3 runtime situations examined by # Repo.test_full above, we only care about RB. (RA1 is valid too, but # covered by Repo). self.run_test("test/demoapp-script-only", True, ".", False) def test_project_in_subdir(self): # This test sets of the git repository so that the python project -- # i.e. setup.py -- is not located in the root directory self.run_test("test/demoapp", False, "project", False) def test_no_tag_prefix(self): self.run_test("test/demoapp", False, ".", False, tag_prefix="") def test_pyproject(self): self.run_test("test/demoapp-pyproject", False, ".", True) def run_test(self, demoapp_dir, script_only, project_sub_dir, pep518, tag_prefix=None): # The test dir should live under /tmp/ or /var/ or somewhere that # isn't the child of the versioneer repo's .git directory, since that # will confuse the tests that check what happens when there is no # .git parent. So if you change this to use a fixed directory (say, # when debugging problems), use /tmp/_test rather than ./_test . self.testdir = tempfile.mkdtemp() if VERBOSE: print("testdir: %s" % (self.testdir,)) if os.path.exists(self.testdir): self.rmtree(self.testdir) # Our tests run from a git repo that lives here. All self.git() # operations run from this directory unless overridden. 
self.gitdir = os.path.join(self.testdir, "demoapp") # Inside that git repo, the project (with setup.py, setup.cfg, and # versioneer.py) lives inside this directory. All self.python() and # self.command() operations run from this directory unless # overridden. self.project_sub_dir = project_sub_dir self.projdir = os.path.join(self.testdir, self.gitdir, self.project_sub_dir) # Extra repository to try to mess with GIT_DIR and check resilience self.extra_git_dir = os.path.join(self.testdir, "extra_git") os.mkdir(self.testdir) os.mkdir(self.extra_git_dir) self.git("init", workdir=self.extra_git_dir) shutil.copytree(demoapp_dir, self.projdir) setup_cfg_fn = self.project_file("setup.cfg") if os.path.exists(setup_cfg_fn): with open(setup_cfg_fn, "r") as f: setup_cfg = f.read() setup_cfg = setup_cfg.replace("@VCS@", "git") tag_prefix_regex = "tag_prefix = (.*)" if tag_prefix is None: tag_prefix = re.search(tag_prefix_regex, setup_cfg).group(1) else: setup_cfg = re.sub(tag_prefix_regex, f"tag_prefix = {tag_prefix}", setup_cfg) with open(setup_cfg_fn, "w") as f: f.write(setup_cfg) if pep518: # Set test versioneer build-system.requires entry to @ file:/// # so that `python -m build` picks it up. pyproject_path = Path(self.project_file("pyproject.toml")) versioneer_source_root = Path(__file__).absolute().parent.parent.parent vsr = str(versioneer_source_root).replace("\\", "/") # For testing on Windows... pyproject_toml = pyproject_path.read_text() pyproject_toml = pyproject_toml.replace("@REPOROOT@", f"file://{vsr}") # Update versioneer config pyproject_toml = pyproject_toml.replace("@VCS@", "git") tag_prefix_regex = 'tag_prefix = "(.*)"' if tag_prefix is None: tag_prefix = re.search(tag_prefix_regex, pyproject_toml).group(1) else: pyproject_toml = re.sub(tag_prefix_regex, f'tag_prefix = "{tag_prefix}"', pyproject_toml) pyproject_path.write_text(pyproject_toml) else: shutil.copyfile("versioneer.py", self.project_file("versioneer.py")) self.git("init") self.git("add", "--all") self.git("commit", "-m", "comment") full = self.git("rev-parse", "HEAD") v = self.python("setup.py", "--version") self.assertEqual(v, "0+untagged.1.g%s" % full[:7]) v = self.python(self.project_file("setup.py"), "--version", workdir=self.testdir) self.assertEqual(v, "0+untagged.1.g%s" % full[:7]) if pep518: out = self.python("-m", "versioneer", "install", "--no-vendor").splitlines() else: out = self.python("versioneer.py", "setup").splitlines() self.assertEqual(out[0], "creating src/demo/_version.py") init = os.path.join("src/demo", "__init__.py") if script_only: self.assertEqual(out[1], f" {init} doesn't exist, ok") else: self.assertEqual(out[1], f" appending to {init}") # Many folks have a ~/.gitignore with ignores .pyc files, but if they # don't, it will show up in the status here. Ignore it. def remove_pyc(s): return [f for f in s if not (f.startswith("?? ") and (f.endswith(".pyc") or f.endswith("__pycache__/"))) ] out = set(remove_pyc(self.git("status", "--porcelain").splitlines())) def pf(fn): return posixpath.normpath(posixpath.join(self.project_sub_dir, fn)) expected = {"A %s" % pf(".gitattributes"), "A %s" % pf("src/demo/_version.py"), } if not script_only: expected.add("M %s" % pf("src/demo/__init__.py")) self.assertEqual(out, expected) if not script_only: with open(self.project_file("src/demo/__init__.py")) as fobj: i = fobj.read().splitlines() self.assertEqual(i[-2], "from . 
import _version") self.assertEqual(i[-1], "__version__ = _version.get_versions()['version']") self.git("commit", "-m", "add _version stuff") # "versioneer.py setup" should be idempotent if pep518: out = self.python("-m", "versioneer", "install", "--no-vendor").splitlines() else: out = self.python("versioneer.py", "setup").splitlines() self.assertEqual(out[0], "creating src/demo/_version.py") if script_only: self.assertEqual(out[1], f" {init} doesn't exist, ok") else: self.assertEqual(out[1], f" {init} unmodified") out = set(remove_pyc(self.git("status", "--porcelain").splitlines())) self.assertEqual(out, set()) UNABLE = "unable to compute version" NOTAG = "no suitable tags" # S1: the tree is sitting on a pre-tagged commit full = self.git("rev-parse", "HEAD") short = "0+untagged.2.g%s" % full[:7] self.do_checks("S1", {"TA": [short, full, False, None], "TB": ["0+unknown", None, None, UNABLE], "TC": [short, full, False, None], "TD": ["0+unknown", full, False, NOTAG], "TE": [short, full, False, None], "TF": [short, full, False, None], }) # TD: expanded keywords only tell us about tags and full revisionids, # not how many patches we are beyond a tag. So any TD git-archive # tarball from a non-tagged version will give us an error. "dirty" is # False, since the tree from which the tarball was created is # necessarily clean. # S2: dirty the pre-tagged tree with open(self.project_file("setup.py"), "a") as fobj: fobj.write("# dirty\n") full = self.git("rev-parse", "HEAD") short = "0+untagged.2.g%s.dirty" % full[:7] self.do_checks("S2", {"TA": [short, full, True, None], "TB": ["0+unknown", None, None, UNABLE], "TC": [short, full, True, None], "TD": ["0+unknown", full, False, NOTAG], "TE": [short, full, True, None], "TF": [short, full, True, None], }) # S3: we commit that change, then make the first tag (1.0) self.git("add", self.project_file("setup.py")) self.git("commit", "-m", "dirty") self.git("tag", f"{tag_prefix}1.0") # also add an unrelated tag, to test exclusion. 
git-describe appears # to return the highest lexicographically-sorted tag, so make sure # the unrelated one sorts earlier self.git("tag", "aaa-999") full = self.git("rev-parse", "HEAD") short = "1.0" if VERBOSE: print("FULL %s" % full) # the tree is now sitting on the 1.0 tag self.do_checks("S3", {"TA": [short, full, False, None], "TB": ["0+unknown", None, None, UNABLE], "TC": [short, full, False, None], "TD": [short, full, False, None], "TE": [short, full, False, None], "TF": [short, full, False, None], }) # S4: now we dirty the tree with open(self.project_file("setup.py"), "a") as fobj: fobj.write("# dirty\n") full = self.git("rev-parse", "HEAD") short = "1.0+0.g%s.dirty" % full[:7] self.do_checks("S4", {"TA": [short, full, True, None], "TB": ["0+unknown", None, None, UNABLE], "TC": [short, full, True, None], "TD": ["1.0", full, False, None], "TE": [short, full, True, None], "TF": [short, full, True, None], }) # S5: now we make one commit past the tag self.git("add", self.project_file("setup.py")) self.git("commit", "-m", "dirty") full = self.git("rev-parse", "HEAD") short = "1.0+1.g%s" % full[:7] self.do_checks("S5", {"TA": [short, full, False, None], "TB": ["0+unknown", None, None, UNABLE], "TC": [short, full, False, None], "TD": ["0+unknown", full, False, NOTAG], "TE": [short, full, False, None], "TF": [short, full, False, None], }) # S6: dirty the post-tag tree with open(self.project_file("setup.py"), "a") as fobj: fobj.write("# more dirty\n") full = self.git("rev-parse", "HEAD") short = "1.0+1.g%s.dirty" % full[:7] self.do_checks("S6", {"TA": [short, full, True, None], "TB": ["0+unknown", None, None, UNABLE], "TC": [short, full, True, None], "TD": ["0+unknown", full, False, NOTAG], "TE": [short, full, True, None], "TF": [short, full, True, None], }) def do_checks(self, state, exps): if os.path.exists(self.subpath("out")): self.rmtree(self.subpath("out")) # TA: project tree self.check_version(self.projdir, state, "TA", exps["TA"]) # TA2: GIT_DIR has no effect when in git repository GIT_DIR = os.path.join(self.extra_git_dir, ".git") with mock.patch.dict(os.environ, {"GIT_DIR": GIT_DIR}): self.check_version(self.projdir, state, "TA", exps["TA"]) # TB: .git-less copy of project tree target = self.subpath("out/demoapp-TB") shutil.copytree(self.projdir, target) if os.path.exists(os.path.join(target, ".git")): self.rmtree(os.path.join(target, ".git")) self.check_version(target, state, "TB", exps["TB"]) # TC: project tree in versionprefix-named parentdir target = self.subpath("out/demo-1.1") shutil.copytree(self.projdir, target) if os.path.exists(os.path.join(target, ".git")): self.rmtree(os.path.join(target, ".git")) self.check_version(target, state, "TC", ["1.1", None, False, None]) # XXX # TD: project subdir of an unpacked git-archive tarball target = self.subpath("out/TD/demoapp-TD") self.git("archive", "--format=tar", "--prefix=demoapp-TD/", "--output=../demo.tar", "HEAD") os.mkdir(self.subpath("out/TD")) with tarfile.TarFile(self.subpath("demo.tar")) as t: t.extractall(path=self.subpath("out/TD")) self.check_version(os.path.join(target, self.project_sub_dir), state, "TD", exps["TD"]) # TE: unpacked setup.py sdist tarball dist_path = os.path.join(self.projdir, "dist") if os.path.exists(dist_path): self.rmtree(dist_path) self.python("setup.py", "sdist", "--formats=tar") files = os.listdir(dist_path) self.assertTrue(len(files)==1, files) distfile = files[0] self.assertEqual(distfile, "demo-%s.tar" % exps["TE"][0]) fn = os.path.join(dist_path, distfile) os.mkdir(self.subpath("out/TE")) 
with tarfile.TarFile(fn) as t: t.extractall(path=self.subpath("out/TE")) target = self.subpath("out/TE/demo-%s" % exps["TE"][0]) self.assertTrue(os.path.isdir(target)) self.check_version(target, state, "TE", exps["TE"]) # TF: unpacked python -m build --sdist tarball pyproject_path = Path(self.projdir) / "pyproject.toml" if not pyproject_path.exists(): return dist_path = Path(self.projdir) / "dist" if dist_path.exists(): self.rmtree(dist_path) # --no-isolation makes this less finicky on slow/unreliable network # connections at the cost of being potentially less "normal". # We'll rely on Tox and CI to keep this environment isolated and # fresh. self.python("-m", "build", "--sdist", "--no-isolation") files = os.listdir(dist_path) self.assertTrue(len(files)==1, files) distfile = files[0] self.assertEqual(distfile, "demo-%s.tar.gz" % exps["TF"][0]) fn = os.path.join(dist_path, distfile) os.mkdir(self.subpath("out/TF")) with tarfile.open(fn) as t: t.extractall(path=self.subpath("out/TF")) target = self.subpath("out/TF/demo-%s" % exps["TF"][0]) self.assertTrue(os.path.isdir(target)) self.check_version(target, state, "TF", exps["TF"]) def check_version(self, workdir, state, tree, exps): exp_version, exp_full, exp_dirty, exp_error = exps if VERBOSE: print("== starting %s %s" % (state, tree)) # RA: setup.py --version if VERBOSE: # setup.py version invokes cmd_version, which uses verbose=True # and has more boilerplate. print(self.python("setup.py", "version", workdir=workdir)) # setup.py --version gives us get_version() with verbose=False. v = self.python("setup.py", "--version", workdir=workdir) self.compare(v, exp_version, state, tree, "RA1") self.assertPEP440(v, state, tree, "RA1") # and test again from outside the tree v = self.python(os.path.join(workdir, "setup.py"), "--version", workdir=self.testdir) self.compare(v, exp_version, state, tree, "RA2") self.assertPEP440(v, state, tree, "RA2") # RB: setup.py build; rundemo --version if os.path.exists(os.path.join(workdir, "build")): self.rmtree(os.path.join(workdir, "build")) self.python("setup.py", "build", "--build-lib=build/lib", "--build-scripts=build/lib", workdir=workdir) build_lib = os.path.join(workdir, "build", "lib") out = self.python("rundemo", "--version", workdir=build_lib) data = dict(line.split(":",1) for line in out.splitlines()) self.compare(data["__version__"], exp_version, state, tree, "RB") self.assertPEP440(data["__version__"], state, tree, "RB") self.compare(data["version"], exp_version, state, tree, "RB") self.compare(data["dirty"], str(exp_dirty), state, tree, "RB") self.compare(data["full-revisionid"], str(exp_full), state, tree, "RB") self.compare(data["error"], str(exp_error), state, tree, "RB") def compare(self, got, expected, state, tree, runtime): where = "/".join([state, tree, runtime]) self.assertEqual(got, expected, "%s: got '%s' != expected '%s'" % (where, got, expected)) if VERBOSE: print(" good %s" % where) def assertPEP440(self, got, state, tree, runtime): where = "/".join([state, tree, runtime]) pv = parse_version(got) # rather than using an undocumented API, setuptools dev recommends this self.assertFalse("Legacy" in pv.__class__.__name__, "%s: '%s' was not pep440-compatible" % (where, got)) self.assertEqual(str(pv), got, "%s: '%s' pep440-normalized to '%s'" % (where, got, str(pv))) if __name__ == '__main__': ver, rc = run_command(common.GITS, ["--version"], ".", True) print("git --version: %s" % ver.strip()) unittest.main() 
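# ---------------------------------------------------------------------------
# Editor's illustrative sketch (not part of the upstream test_git.py): the
# expected_renders table above pairs a "pieces" dict with the version string
# each style should produce. Assuming versioneer.py is importable (it exposes
# the same render() function that test_render.py imports), the mapping can be
# reproduced directly; the literal values below are copied from the
# distance=1, clean, master entry of expected_renders. The helper name
# _render_sketch is hypothetical and is never called by the test suite.
def _render_sketch():
    from versioneer import render
    pieces = {"long": "250b7ca731388d8f016db2e06ab1d6289486424b",
              "short": "250b7ca", "error": None, "closest-tag": "1.0",
              "distance": 1, "dirty": False, "branch": "master",
              "date": "2017-07-24T16:03:40+0200"}
    # Each style formats the same pieces differently (see expected_renders).
    assert render(pieces, "pep440")["version"] == "1.0+1.g250b7ca"
    assert render(pieces, "pep440-pre")["version"] == "1.0.post0.dev1"
    assert render(pieces, "git-describe")["version"] == "1.0-1-g250b7ca"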
python-versioneer-0.29/test/git/test_invocations.py000066400000000000000000000634751445202303300226660ustar00rootroot00000000000000import os, sys, shutil, unittest, tempfile, tarfile, warnings from wheel.bdist_wheel import get_abi_tag, get_platform from packaging.tags import interpreter_name, interpreter_version sys.path.insert(0, "src") import common pyver_major = "py%d" % sys.version_info[0] pyver = "py%d.%d" % sys.version_info[:2] # For binary wheels with native code impl, impl_ver = interpreter_name(), interpreter_version() abi = get_abi_tag() try: plat = get_platform(None) except TypeError: # wheel < 0.34 plat = get_platform() class _Invocations(common.Common): def setUp(self): if False: # when debugging, put the generated files in a predictable place self.testdir = os.path.abspath("t") if os.path.exists(self.testdir): return os.mkdir(self.testdir) else: self.testdir = tempfile.mkdtemp() os.mkdir(self.subpath("cache")) os.mkdir(self.subpath("cache", "setuptools")) self.gitdir = None self.projdir = None def make_venv(self, mode): if not os.path.exists(self.subpath("venvs")): os.mkdir(self.subpath("venvs")) venv_dir = self.subpath(os.path.join("venvs", mode)) # python3 on OS-X uses a funky two-part executable and an environment # variable to communicate between them. If this variable is still set # by the time a virtualenv's 'pip' or 'python' is run, and if that # command spawns another sys.executable underneath it, that second # child may use the wrong python, and can install things into the # real system library instead of the virtualenv. Invoking # virtualenv.create_environment() clears this as a side-effect, but # to make things safe I'll just clear this now. See # https://github.com/pypa/virtualenv/issues/322 and # https://bugs.python.org/issue22490 for some hints. I tried # switching to 'venv' on py3, but only py3.4 includes pip, and even # then it's an ancient version. os.environ.pop("__PYVENV_LAUNCHER__", None) # virtualenv causes DeprecationWarning/ResourceWarning on py3 with warnings.catch_warnings(): warnings.simplefilter("ignore") self.python('-m', 'virtualenv', venv_dir, workdir=self.testdir) self.run_in_venv(venv_dir, venv_dir, 'pip', 'install', '-U', 'pip', 'wheel', 'packaging') return venv_dir def get_venv_bin(self, venv, command): if sys.platform == "win32": return os.path.join(venv, "Scripts", command) return os.path.join(venv, "bin", command) def run_in_venv(self, venv, workdir, command, *args, use_python=False): bin_args = [self.get_venv_bin(venv, command)] pybin = self.get_venv_bin(venv, "python") if command == "pip": bin_args = [pybin, "-m", "pip"] args = ["--isolated", "--no-cache-dir"] + list(args) elif command == "rundemo" and use_python: bin_args = [pybin] + bin_args return self.command(*bin_args, *args, workdir=workdir) def check_in_venv(self, venv, use_python=False): out = self.run_in_venv(venv, venv, "rundemo", use_python=use_python) v = dict(line.split(":", 1) for line in out.splitlines()) self.assertEqual(v["version"], "2.0") return v def check_in_venv_withlib(self, venv): v = self.check_in_venv(venv) self.assertEqual(v["demolib"], "1.0") # "demolib" has a version of 1.0 and is built with setuptools # "demoapp2-setuptools" is v2.0, uses setuptools, and depends on demolib # repos and unpacked git-archive tarballs come in two flavors: normal (in # which the setup.py/setup.cfg/versioneer.py files live in the root of # the source tree), and "subproject" (where they live in a subdirectory). 
# sdists are always "normal" (although they might have come from either # normal or subproject -style source trees), and wheels/eggs don't have # these files at all. # TODO: git-archive subproject-flavor def make_demolib_sdist(self): # create an sdist of demolib-1.0 . for the *lib*, we only use the # tarball, never the repo. demolib_sdist = self.subpath("cache", "demolib-1.0.tar") if os.path.exists(demolib_sdist): return demolib_sdist libdir = self.subpath("build-demolib") shutil.copytree("test/demolib", libdir) shutil.copy("versioneer.py", libdir) self.git("init", workdir=libdir) self.python("versioneer.py", "setup", workdir=libdir) self.git("add", "--all", workdir=libdir) self.git("commit", "-m", "comment", workdir=libdir) self.git("tag", "demolib-1.0", workdir=libdir) self.python("setup.py", "sdist", "--format=tar", workdir=libdir) created = os.path.join(libdir, "dist", "demolib-1.0.tar") self.assertTrue(os.path.exists(created)) shutil.copyfile(created, demolib_sdist) return demolib_sdist def make_linkdir(self): # create/populate a fake pypi directory for use with --find-links linkdir = self.subpath("linkdir") if os.path.exists(linkdir): return linkdir os.mkdir(linkdir) demolib_sdist = self.make_demolib_sdist() shutil.copy(demolib_sdist, linkdir) return linkdir def make_empty_indexdir(self): indexdir = self.subpath("indexdir") if os.path.exists(indexdir): return indexdir os.mkdir(indexdir) return indexdir def make_setuptools_repo(self): # create a clean repo of demoapp2-setuptools at 2.0 repodir = self.subpath("demoapp2-setuptools-repo") if os.path.exists(repodir): shutil.rmtree(repodir) shutil.copytree("test/demoapp2-setuptools", repodir) shutil.copy("versioneer.py", repodir) self.git("init", workdir=repodir) self.python("versioneer.py", "setup", workdir=repodir) self.git("add", "--all", workdir=repodir) self.git("commit", "-m", "comment", workdir=repodir) self.git("tag", "demoapp2-2.0", workdir=repodir) return repodir def make_setuptools_extension_unpacked(self): sdist = self.make_setuptools_extension_sdist() unpack_into = self.subpath("demoappext-setuptools-unpacked") if os.path.exists(unpack_into): shutil.rmtree(unpack_into) os.mkdir(unpack_into) with tarfile.TarFile(sdist) as t: t.extractall(path=unpack_into) unpacked = os.path.join(unpack_into, "demoappext-2.0") self.assertTrue(os.path.exists(unpacked)) return unpacked def make_setuptools_extension_sdist(self): # create an sdist tarball of demoappext-setuptools at 2.0 demoappext_setuptools_sdist = self.subpath("cache", "setuptools", "demoappext-2.0.tar") if os.path.exists(demoappext_setuptools_sdist): return demoappext_setuptools_sdist repodir = self.make_setuptools_extension_repo() self.python("setup.py", "sdist", "--format=tar", workdir=repodir) created = os.path.join(repodir, "dist", "demoappext-2.0.tar") self.assertTrue(os.path.exists(created), created) shutil.copyfile(created, demoappext_setuptools_sdist) return demoappext_setuptools_sdist def make_setuptools_extension_repo(self): # create a clean repo of demoappext-setuptools at 2.0 repodir = self.subpath("demoappext-setuptools-repo") if os.path.exists(repodir): shutil.rmtree(repodir) # import ipdb; ipdb.set_trace() shutil.copytree("test/demoappext-setuptools", repodir) shutil.copy("versioneer.py", repodir) self.git("init", workdir=repodir) self.python("versioneer.py", "setup", workdir=repodir) self.git("add", "--all", workdir=repodir) self.git("commit", "-m", "comment", workdir=repodir) self.git("tag", "demoappext-2.0", workdir=repodir) return repodir def 
make_setuptools_repo_subproject(self): # create a clean repo of demoapp2-setuptools at 2.0 repodir = self.subpath("demoapp2-setuptools-repo-subproject") if os.path.exists(repodir): shutil.rmtree(repodir) shutil.copytree("test/demoapp2-setuptools-subproject", repodir) projectdir = os.path.join(repodir, "subproject") shutil.copy("versioneer.py", projectdir) self.git("init", workdir=repodir) self.python("versioneer.py", "setup", workdir=projectdir) self.git("add", "--all", workdir=repodir) self.git("commit", "-m", "comment", workdir=repodir) self.git("tag", "demoapp2-2.0", workdir=repodir) return projectdir def make_setuptools_sdist(self): # create an sdist tarball of demoapp2-setuptools at 2.0 demoapp2_setuptools_sdist = self.subpath("cache", "setuptools", "demoapp2-2.0.tar") if os.path.exists(demoapp2_setuptools_sdist): return demoapp2_setuptools_sdist repodir = self.make_setuptools_repo() self.python("setup.py", "sdist", "--format=tar", workdir=repodir) created = os.path.join(repodir, "dist", "demoapp2-2.0.tar") self.assertTrue(os.path.exists(created), created) shutil.copyfile(created, demoapp2_setuptools_sdist) return demoapp2_setuptools_sdist def make_setuptools_sdist_subproject(self): demoapp2_setuptools_sdist = self.subpath("cache", "setuptools", "demoapp2-subproject-2.0.tar") if os.path.exists(demoapp2_setuptools_sdist): return demoapp2_setuptools_sdist projectdir = self.make_setuptools_repo_subproject() self.python("setup.py", "sdist", "--format=tar", workdir=projectdir) created = os.path.join(projectdir, "dist", "demoapp2-2.0.tar") self.assertTrue(os.path.exists(created), created) shutil.copyfile(created, demoapp2_setuptools_sdist) return demoapp2_setuptools_sdist def make_setuptools_unpacked(self): sdist = self.make_setuptools_sdist() unpack_into = self.subpath("demoapp2-setuptools-unpacked") if os.path.exists(unpack_into): shutil.rmtree(unpack_into) os.mkdir(unpack_into) with tarfile.TarFile(sdist) as t: t.extractall(path=unpack_into) unpacked = os.path.join(unpack_into, "demoapp2-2.0") self.assertTrue(os.path.exists(unpacked)) return unpacked def make_setuptools_subproject_unpacked(self): sdist = self.make_setuptools_sdist_subproject() unpack_into = self.subpath("demoapp2-setuptools-unpacked-subproject") if os.path.exists(unpack_into): shutil.rmtree(unpack_into) os.mkdir(unpack_into) with tarfile.TarFile(sdist) as t: t.extractall(path=unpack_into) unpacked = os.path.join(unpack_into, "demoapp2-2.0") self.assertTrue(os.path.exists(unpacked)) return unpacked def make_setuptools_egg(self): # create an egg of demoapp2-setuptools at 2.0 demoapp2_setuptools_egg = self.subpath("cache", "setuptools", "demoapp2-2.0-%s.egg" % pyver) if os.path.exists(demoapp2_setuptools_egg): return demoapp2_setuptools_egg repodir = self.make_setuptools_repo() self.python("setup.py", "bdist_egg", workdir=repodir) created = os.path.join(repodir, "dist", "demoapp2-2.0-%s.egg" % pyver) self.assertTrue(os.path.exists(created), created) shutil.copyfile(created, demoapp2_setuptools_egg) return demoapp2_setuptools_egg def make_setuptools_wheel_with_setup_py(self): # create an wheel of demoapp2-setuptools at 2.0 wheelname = "demoapp2-2.0-%s-none-any.whl" % pyver_major demoapp2_setuptools_wheel = self.subpath("cache", "setuptools", wheelname) if os.path.exists(demoapp2_setuptools_wheel): # there are two ways to make this .whl, and we need to exercise # both, so don't actually cache the results os.unlink(demoapp2_setuptools_wheel) repodir = self.make_setuptools_repo() self.python("setup.py", "bdist_wheel", 
workdir=repodir) created = os.path.join(repodir, "dist", wheelname) self.assertTrue(os.path.exists(created), created) shutil.copyfile(created, demoapp2_setuptools_wheel) return demoapp2_setuptools_wheel def make_setuptools_wheel_with_pip(self): # create an wheel of demoapp2-setuptools at 2.0 wheelname = "demoapp2-2.0-%s-none-any.whl" % pyver_major demoapp2_setuptools_wheel = self.subpath("cache", "setuptools", wheelname) if os.path.exists(demoapp2_setuptools_wheel): # there are two ways to make this .whl, and we need to exercise # both, so don't actually cache the results os.unlink(demoapp2_setuptools_wheel) linkdir = self.make_linkdir() repodir = self.make_setuptools_repo() venv = self.make_venv("make-setuptools-wheel-with-pip") self.run_in_venv(venv, repodir, "pip", "wheel", "--wheel-dir", "wheelhouse", "--no-index", "--find-links", linkdir, ".") created = os.path.join(repodir, "wheelhouse", wheelname) self.assertTrue(os.path.exists(created), created) shutil.copyfile(created, demoapp2_setuptools_wheel) return demoapp2_setuptools_wheel def make_binary_wheelname(self, app): return "%s-2.0-%s-%s-%s.whl" % (app, "".join([impl, impl_ver]), abi, plat.replace("-", "_").replace(".", "_") ) class SetuptoolsRepo(_Invocations, unittest.TestCase): def test_install(self): repodir = self.make_setuptools_repo() demolib = self.make_demolib_sdist() venv = self.make_venv("setuptools-repo-install") # "setup.py install" doesn't take --no-index or --find-links, so we # pre-install the dependency self.run_in_venv(venv, venv, "pip", "install", demolib) self.run_in_venv(venv, repodir, "python", "setup.py", "install") self.check_in_venv_withlib(venv) def test_install_subproject(self): projectdir = self.make_setuptools_repo_subproject() demolib = self.make_demolib_sdist() venv = self.make_venv("setuptools-repo-install-subproject") # "setup.py install" doesn't take --no-index or --find-links, so we # pre-install the dependency self.run_in_venv(venv, venv, "pip", "install", demolib) self.run_in_venv(venv, projectdir, "python", "setup.py", "install") self.check_in_venv_withlib(venv) def test_develop(self): linkdir = self.make_linkdir() indexdir = self.make_empty_indexdir() repodir = self.make_setuptools_repo() venv = self.make_venv("setuptools-repo-develop") # "setup.py develop" takes --find-links and --index-url but not # --no-index self.run_in_venv(venv, repodir, "python", "setup.py", "develop", "--index-url", indexdir, "--find-links", linkdir, ) self.check_in_venv_withlib(venv) def test_develop_subproject(self): linkdir = self.make_linkdir() indexdir = self.make_empty_indexdir() projectdir = self.make_setuptools_repo_subproject() venv = self.make_venv("setuptools-repo-develop-subproject") # "setup.py develop" takes --find-links and --index-url but not # --no-index self.run_in_venv(venv, projectdir, "python", "setup.py", "develop", "--index-url", indexdir, "--find-links", linkdir, ) self.check_in_venv_withlib(venv) def test_egg(self): self.make_setuptools_egg() # asserts version as a side-effect def test_pip_wheel(self): self.make_setuptools_wheel_with_pip() # asserts version as a side-effect def test_bdist_wheel(self): self.make_setuptools_wheel_with_setup_py() # asserts version as a side-effect def test_sdist(self): sdist = self.make_setuptools_sdist() # asserts version as a side-effect # make sure we used setuptools/sdist, not distutils/sdist with tarfile.TarFile(sdist) as t: self.assertIn("demoapp2-2.0/src/demoapp2.egg-info/PKG-INFO", t.getnames()) def test_sdist_subproject(self): sdist = 
self.make_setuptools_sdist_subproject() # make sure we used setuptools/sdist, not distutils/sdist with tarfile.TarFile(sdist) as t: self.assertIn("demoapp2-2.0/src/demoapp2.egg-info/PKG-INFO", t.getnames()) def test_pip_install(self): linkdir = self.make_linkdir() repodir = self.make_setuptools_repo() venv = self.make_venv("setuptools-repo-pip-install") self.run_in_venv(venv, repodir, "pip", "install", ".", "--no-index", "--find-links", linkdir) self.check_in_venv_withlib(venv) def test_pip_install_subproject(self): linkdir = self.make_linkdir() projectdir = self.make_setuptools_repo_subproject() venv = self.make_venv("setuptools-repo-pip-install-subproject") self.run_in_venv(venv, projectdir, "pip", "install", ".", "--no-index", "--find-links", linkdir) self.check_in_venv_withlib(venv) def test_pip_install_from_afar(self): linkdir = self.make_linkdir() repodir = self.make_setuptools_repo() venv = self.make_venv("setuptools-repo-pip-install-from-afar") self.run_in_venv(venv, venv, "pip", "install", repodir, "--no-index", "--find-links", linkdir) self.check_in_venv_withlib(venv) def test_pip_install_from_afar_subproject(self): linkdir = self.make_linkdir() projectdir = self.make_setuptools_repo_subproject() venv = self.make_venv("setuptools-repo-pip-install-from-afar-subproject") self.run_in_venv(venv, venv, "pip", "install", projectdir, "--no-index", "--find-links", linkdir) self.check_in_venv_withlib(venv) def test_pip_install_editable(self): linkdir = self.make_linkdir() repodir = self.make_setuptools_repo() venv = self.make_venv("setuptools-repo-pip-install-editable") self.run_in_venv(venv, repodir, "pip", "install", "--editable", ".", "--no-index", "--find-links", linkdir) self.check_in_venv_withlib(venv) def test_pip_install_editable_subproject(self): linkdir = self.make_linkdir() projectdir = self.make_setuptools_repo_subproject() venv = self.make_venv("setuptools-repo-pip-install-editable-subproject") self.run_in_venv(venv, projectdir, "pip", "install", "--editable", ".", "--no-index", "--find-links", linkdir) self.check_in_venv_withlib(venv) class SetuptoolsSdist(_Invocations, unittest.TestCase): def test_pip_install(self): linkdir = self.make_linkdir() sdist = self.make_setuptools_sdist() venv = self.make_venv("setuptools-sdist-pip-install") self.run_in_venv(venv, venv, "pip", "install", "--no-index", "--find-links", linkdir, sdist) self.check_in_venv_withlib(venv) def test_pip_install_subproject(self): linkdir = self.make_linkdir() sdist = self.make_setuptools_sdist_subproject() venv = self.make_venv("setuptools-sdist-pip-install-subproject") self.run_in_venv(venv, venv, "pip", "install", "--no-index", "--find-links", linkdir, sdist) self.check_in_venv_withlib(venv) class SetuptoolsWheel(_Invocations, unittest.TestCase): def test_pip_install(self): linkdir = self.make_linkdir() wheel = self.make_setuptools_wheel_with_setup_py() venv = self.make_venv("setuptools-wheel-pip-install") self.run_in_venv(venv, venv, "pip", "install", "--no-index", "--find-links", linkdir, wheel) self.check_in_venv_withlib(venv) class SetuptoolsUnpacked(_Invocations, unittest.TestCase): def test_install(self): unpacked = self.make_setuptools_unpacked() demolib = self.make_demolib_sdist() venv = self.make_venv("setuptools-unpacked-install") # "setup.py install" doesn't take --no-index or --find-links, so we # pre-install the dependency self.run_in_venv(venv, venv, "pip", "install", demolib) self.run_in_venv(venv, unpacked, "python", "setup.py", "install") self.check_in_venv_withlib(venv) def 
test_install_subproject(self): unpacked = self.make_setuptools_subproject_unpacked() demolib = self.make_demolib_sdist() venv = self.make_venv("setuptools-subproject-unpacked-install") # "setup.py install" doesn't take --no-index or --find-links, so we # pre-install the dependency self.run_in_venv(venv, venv, "pip", "install", demolib) self.run_in_venv(venv, unpacked, "python", "setup.py", "install") self.check_in_venv_withlib(venv)
def test_wheel(self): unpacked = self.make_setuptools_unpacked() self.python("setup.py", "bdist_wheel", workdir=unpacked) wheelname = "demoapp2-2.0-%s-none-any.whl" % pyver_major wheel = os.path.join(unpacked, "dist", wheelname) self.assertTrue(os.path.exists(wheel))
def test_pip_wheel(self): unpacked = self.make_setuptools_unpacked() linkdir = self.make_linkdir() wheelname = "demoapp2-2.0-%s-none-any.whl" % pyver_major venv = self.make_venv("setuptools-unpacked-pip-wheel") self.run_in_venv(venv, unpacked, "pip", "wheel", "--wheel-dir", "wheelhouse", "--no-index", "--find-links", linkdir, ".") created = os.path.join(unpacked, "wheelhouse", wheelname) self.assertTrue(os.path.exists(created), created)
def test_pip_install(self): linkdir = self.make_linkdir() repodir = self.make_setuptools_unpacked() venv = self.make_venv("setuptools-unpacked-pip-install") self.run_in_venv(venv, repodir, "pip", "install", ".", "--no-index", "--find-links", linkdir) self.check_in_venv_withlib(venv)
def test_pip_install_subproject(self): linkdir = self.make_linkdir() unpacked = self.make_setuptools_subproject_unpacked() venv = self.make_venv("setuptools-subproject-unpacked-pip-install") self.run_in_venv(venv, unpacked, "pip", "install", ".", "--no-index", "--find-links", linkdir) self.check_in_venv_withlib(venv)
def test_pip_install_from_afar(self): linkdir = self.make_linkdir() repodir = self.make_setuptools_unpacked() venv = self.make_venv("setuptools-unpacked-pip-install-from-afar") self.run_in_venv(venv, venv, "pip", "install", repodir, "--no-index", "--find-links", linkdir) self.check_in_venv_withlib(venv)
def test_extension_wheel_setuptools(self): # create a wheel of demoappext-setuptools at 2.0 wheelname = self.make_binary_wheelname('demoappext') demoappext_setuptools_wheel = self.subpath("cache", "setuptools", wheelname) if os.path.exists(demoappext_setuptools_wheel): # there are two ways to make this .whl, and we need to exercise # both, so don't actually cache the results os.unlink(demoappext_setuptools_wheel) repodir = self.make_setuptools_extension_repo() self.python("setup.py", "bdist_wheel", workdir=repodir) created = os.path.join(repodir, "dist", wheelname) self.assertTrue(os.path.exists(created), created)
def test_extension_inplace(self): # build extensions in place. No wheel package unpacked = self.make_setuptools_extension_unpacked() venv = self.make_venv("setuptools-unpacked-pip-wheel-extension") self.run_in_venv(venv, unpacked, "python", "setup.py", "build_ext", "-i") # No wheel package is created, _version.py should exist in # module dir only version_file = os.path.join(unpacked, "demo", "_version.py") self.assertTrue(os.path.exists(version_file))
def test_extension_wheel_pip(self): # create a wheel of demoappext-setuptools at 2.0 with pip wheelname = self.make_binary_wheelname('demoappext') demoappext_setuptools_wheel = self.subpath("cache", "setuptools", wheelname) if os.path.exists(demoappext_setuptools_wheel): # there are two ways to make this .whl, and we need to exercise # both, so don't actually cache the results os.unlink(demoappext_setuptools_wheel) unpacked = self.make_setuptools_extension_unpacked() linkdir = self.make_linkdir() venv = self.make_venv("setuptools-unpacked-pip-wheel-extension") self.run_in_venv(venv, unpacked, "pip", "wheel", "--wheel-dir", "wheelhouse", "--no-index", "--find-links", linkdir, ".") created = os.path.join(unpacked, "wheelhouse", wheelname) self.assertTrue(os.path.exists(created), created)
if __name__ == '__main__': unittest.main()
python-versioneer-0.29/test/run_pyflakes_src.py000066400000000000000000000011261445202303300220420ustar00rootroot00000000000000import os from pyflakes.api import main def get_filenames(): for dirpath, dirnames, filenames in os.walk("src"): if dirpath.endswith("__pycache__"): continue for rel_fn in filenames: if not rel_fn.endswith(".py"): continue fn = os.path.join(dirpath, rel_fn) if fn in [os.path.join("src", "header.py"), os.path.join("src", "git", "long_header.py"), ]: continue print("pyflakes on:", fn) yield fn main(args=list(get_filenames()))
python-versioneer-0.29/test/test_config.py000066400000000000000000000041731445202303300210020ustar00rootroot00000000000000import unittest import os, tempfile import configparser from versioneer import get_config_from_root base = """ [versioneer] VCS = git style = pep440 versionfile_source = petmail/_version.py versionfile_build = petmail/_version.py tag_prefix = v parentdir_prefix = petmail- """
class Parser(unittest.TestCase): def parse(self, contents): with tempfile.TemporaryDirectory() as root: with open(os.path.join(root, "setup.cfg"), "w") as f: f.write(contents) return get_config_from_root(root)
def test_base(self): cfg = self.parse(base) self.assertEqual(cfg.VCS, "git") self.assertEqual(cfg.style, "pep440") self.assertEqual(cfg.versionfile_source, "petmail/_version.py") self.assertEqual(cfg.versionfile_build, "petmail/_version.py") self.assertEqual(cfg.tag_prefix, "v") self.assertEqual(cfg.parentdir_prefix, "petmail-") self.assertEqual(cfg.verbose, None)
def test_empty(self): self.assertRaises(configparser.NoSectionError, self.parse, "")
def test_mostly_empty(self): self.assertRaises(configparser.NoOptionError, self.parse, "[versioneer]\n")
def test_minimal(self): cfg = self.parse("[versioneer]\nvcs = git\n") self.assertEqual(cfg.VCS, "git") self.assertEqual(cfg.style, "") self.assertEqual(cfg.versionfile_source, None) self.assertEqual(cfg.versionfile_build, None) self.assertEqual(cfg.tag_prefix, "") self.assertEqual(cfg.parentdir_prefix, None) self.assertEqual(cfg.verbose, None)
def test_empty_tag_prefixes(self): # all three of these should give an empty tag_prefix: # tag_prefix = # tag_prefix = '' # tag_prefix = "" cfg = self.parse("[versioneer]\nVCS=git\ntag_prefix=") self.assertEqual(cfg.tag_prefix, "") cfg = self.parse("[versioneer]\nVCS=git\ntag_prefix=''") self.assertEqual(cfg.tag_prefix, "") cfg = self.parse("[versioneer]\nVCS=git\ntag_prefix=\"\"") self.assertEqual(cfg.tag_prefix, "")
python-versioneer-0.29/test/test_file.py000066400000000000000000000013711445202303300204510ustar00rootroot00000000000000import os, tempfile, unittest from versioneer import versions_from_file class Parser(unittest.TestCase): def test_lf(self): with tempfile.TemporaryDirectory() as root: fn = os.path.join(root, "_version.py") with open(fn, "wb") as f: f.write(b"version_json = '''\n{}\n''' # END VERSION_JSON\n") data = versions_from_file(fn) self.assertEqual(data, {})
def test_crlf(self): with tempfile.TemporaryDirectory() as root: fn = os.path.join(root, "_version.py") with open(fn, "wb") as f: f.write(b"version_json = '''\r\n{}\r\n''' # END VERSION_JSON\r\n") data = versions_from_file(fn) self.assertEqual(data, {})
python-versioneer-0.29/test/test_render.py000066400000000000000000000315051445202303300210130ustar00rootroot00000000000000import unittest from versioneer import render class Testing_renderer_case_mixin: """ This is a mixin object which can be combined with a unittest.TestCase which defines a style and an expected dictionary. See Test_pep440 for an example. """
def define_pieces(self, closest_tag, distance=0, dirty=False, branch=False): return {"error": '', "closest-tag": closest_tag, "distance": distance, "dirty": dirty, "short": "abc" if distance else '', "long": "abcdefg" if distance else '', "date": "2016-05-31T13:02:11+0200", "branch": "feature" if branch else "master"}
def assert_rendered(self, pieces, test_case_name): version = render(pieces, self.style)['version'] expected = self.expected[test_case_name] msg = ('Versions differ for {0} style with "{1}" case: expected {2}, ' 'got {3}'.format(self.style, test_case_name, expected, version)) self.assertEqual(version, expected, msg)
# Naming structure: # test_(un)tagged__commits_(clean|dirty)
def test_tagged_0_commits_clean(self): self.assert_rendered(self.define_pieces('v1.2.3'), 'tagged_0_commits_clean')
def test_tagged_1_commits_clean(self): self.assert_rendered(self.define_pieces('v1.2.3', distance=1), 'tagged_1_commits_clean')
def test_tagged_0_commits_dirty(self): self.assert_rendered(self.define_pieces('v1.2.3', dirty=True), 'tagged_0_commits_dirty')
def test_tagged_1_commits_dirty(self): self.assert_rendered(self.define_pieces('v1.2.3', distance=1, dirty=True), 'tagged_1_commits_dirty')
def test_untagged_0_commits_clean(self): self.assert_rendered(self.define_pieces(None), 'untagged_0_commits_clean')
def test_untagged_1_commits_clean(self): self.assert_rendered(self.define_pieces(None, distance=1), 'untagged_1_commits_clean')
def test_untagged_0_commits_dirty(self): self.assert_rendered(self.define_pieces(None, dirty=True), 'untagged_0_commits_dirty')
def test_untagged_1_commits_dirty(self): self.assert_rendered(self.define_pieces(None, distance=1, dirty=True), 'untagged_1_commits_dirty')
def test_error_getting_parts(self): self.assert_rendered({'error': 'Not a git repo'}, 'error_getting_parts')
class Testing_branch_renderer_case_mixin(Testing_renderer_case_mixin): """ This is a mixin object which extends the base mixin and adds tests that also test on the value of the branch in the dictionary.
""" # Naming structure: # test_branch_(un)tagged__commits_(clean|dirty) def test_branch_tagged_0_commits_clean(self): self.assert_rendered(self.define_pieces('v1.2.3', branch=True), 'branch_tagged_0_commits_clean') def test_branch_tagged_1_commits_clean(self): self.assert_rendered(self.define_pieces('v1.2.3', branch=True, distance=1), 'branch_tagged_1_commits_clean') def test_branch_tagged_0_commits_dirty(self): self.assert_rendered(self.define_pieces('v1.2.3', branch=True, dirty=True), 'branch_tagged_0_commits_dirty') def test_branch_tagged_1_commits_dirty(self): self.assert_rendered(self.define_pieces('v1.2.3', branch=True, distance=1, dirty=True), 'branch_tagged_1_commits_dirty') def test_branch_untagged_0_commits_clean(self): self.assert_rendered(self.define_pieces(None, branch=True), 'branch_untagged_0_commits_clean') def test_branch_untagged_1_commits_clean(self): self.assert_rendered(self.define_pieces(None, branch=True, distance=1), 'branch_untagged_1_commits_clean') def test_branch_untagged_0_commits_dirty(self): self.assert_rendered(self.define_pieces(None, branch=True, dirty=True), 'branch_untagged_0_commits_dirty') def test_branch_untagged_1_commits_dirty(self): self.assert_rendered(self.define_pieces(None, branch=True, distance=1, dirty=True), 'branch_untagged_1_commits_dirty') class Testing_post_renderer_case_mixin(Testing_renderer_case_mixin): """ This is a mixin object which extends the base mixin and adds tests that also test version tags with a post-release segment. """ # Naming structure: # test_(un)tagged_post__commits_(clean|dirty) def test_tagged_post_0_commits_clean(self): self.assert_rendered(self.define_pieces('v1.2.3.post'), 'tagged_post_0_commits_clean') def test_tagged_post1_0_commits_clean(self): self.assert_rendered(self.define_pieces('v1.2.3.post1'), 'tagged_post1_0_commits_clean') def test_tagged_post_1_commits_clean(self): self.assert_rendered(self.define_pieces('v1.2.3.post', distance=1), 'tagged_post_1_commits_clean') def test_tagged_post1_1_commits_clean(self): self.assert_rendered(self.define_pieces('v1.2.3.post1', distance=1), 'tagged_post1_1_commits_clean') def test_tagged_post_0_commits_dirty(self): self.assert_rendered(self.define_pieces('v1.2.3.post', dirty=True), 'tagged_post_0_commits_dirty') def test_tagged_post1_0_commits_dirty(self): self.assert_rendered(self.define_pieces('v1.2.3.post1', dirty=True), 'tagged_post1_0_commits_dirty') def test_tagged_post_1_commits_dirty(self): self.assert_rendered(self.define_pieces('v1.2.3.post', distance=1, dirty=True), 'tagged_post_1_commits_dirty') def test_tagged_post1_1_commits_dirty(self): self.assert_rendered(self.define_pieces('v1.2.3.post1', distance=1, dirty=True), 'tagged_post1_1_commits_dirty') class Test_pep440(unittest.TestCase, Testing_renderer_case_mixin): style = 'pep440' expected = {'tagged_0_commits_clean': 'v1.2.3', 'tagged_0_commits_dirty': 'v1.2.3+0.g.dirty', 'tagged_1_commits_clean': 'v1.2.3+1.gabc', 'tagged_1_commits_dirty': 'v1.2.3+1.gabc.dirty', 'untagged_0_commits_clean': '0+untagged.0.g', 'untagged_0_commits_dirty': '0+untagged.0.g.dirty', 'untagged_1_commits_clean': '0+untagged.1.gabc', 'untagged_1_commits_dirty': '0+untagged.1.gabc.dirty', 'error_getting_parts': 'unknown' } class Test_pep440_branch(unittest.TestCase, Testing_branch_renderer_case_mixin): style = 'pep440-branch' expected = {'tagged_0_commits_clean': 'v1.2.3', 'tagged_0_commits_dirty': 'v1.2.3+0.g.dirty', 'tagged_1_commits_clean': 'v1.2.3+1.gabc', 'tagged_1_commits_dirty': 'v1.2.3+1.gabc.dirty', 
'untagged_0_commits_clean': '0+untagged.0.g', 'untagged_0_commits_dirty': '0+untagged.0.g.dirty', 'untagged_1_commits_clean': '0+untagged.1.gabc', 'untagged_1_commits_dirty': '0+untagged.1.gabc.dirty', 'branch_tagged_0_commits_clean': 'v1.2.3', 'branch_tagged_0_commits_dirty': 'v1.2.3.dev0+0.g.dirty', 'branch_tagged_1_commits_clean': 'v1.2.3.dev0+1.gabc', 'branch_tagged_1_commits_dirty': 'v1.2.3.dev0+1.gabc.dirty', 'branch_untagged_0_commits_clean': '0.dev0+untagged.0.g', 'branch_untagged_0_commits_dirty': '0.dev0+untagged.0.g.dirty', 'branch_untagged_1_commits_clean': '0.dev0+untagged.1.gabc', 'branch_untagged_1_commits_dirty': '0.dev0+untagged.1.gabc.dirty', 'error_getting_parts': 'unknown' } class Test_pep440_old(unittest.TestCase, Testing_renderer_case_mixin): style = 'pep440-old' expected = {'tagged_0_commits_clean': 'v1.2.3', 'tagged_0_commits_dirty': 'v1.2.3.post0.dev0', 'tagged_1_commits_clean': 'v1.2.3.post1', 'tagged_1_commits_dirty': 'v1.2.3.post1.dev0', 'untagged_0_commits_clean': '0.post0', 'untagged_0_commits_dirty': '0.post0.dev0', 'untagged_1_commits_clean': '0.post1', 'untagged_1_commits_dirty': '0.post1.dev0', 'error_getting_parts': 'unknown' } class Test_pep440_post(unittest.TestCase, Testing_renderer_case_mixin): style = 'pep440-post' expected = {'tagged_0_commits_clean': 'v1.2.3', 'tagged_0_commits_dirty': 'v1.2.3.post0.dev0+g', 'tagged_1_commits_clean': 'v1.2.3.post1+gabc', 'tagged_1_commits_dirty': 'v1.2.3.post1.dev0+gabc', 'untagged_0_commits_clean': '0.post0+g', 'untagged_0_commits_dirty': '0.post0.dev0+g', 'untagged_1_commits_clean': '0.post1+gabc', 'untagged_1_commits_dirty': '0.post1.dev0+gabc', 'error_getting_parts': 'unknown' } class Test_pep440_post_branch(unittest.TestCase, Testing_branch_renderer_case_mixin): style = 'pep440-post-branch' expected = {'tagged_0_commits_clean': 'v1.2.3', 'tagged_0_commits_dirty': 'v1.2.3.post0+g.dirty', 'tagged_1_commits_clean': 'v1.2.3.post1+gabc', 'tagged_1_commits_dirty': 'v1.2.3.post1+gabc.dirty', 'untagged_0_commits_clean': '0.post0+g', 'untagged_0_commits_dirty': '0.post0+g.dirty', 'untagged_1_commits_clean': '0.post1+gabc', 'untagged_1_commits_dirty': '0.post1+gabc.dirty', 'branch_tagged_0_commits_clean': 'v1.2.3', 'branch_tagged_0_commits_dirty': 'v1.2.3.post0.dev0+g.dirty', 'branch_tagged_1_commits_clean': 'v1.2.3.post1.dev0+gabc', 'branch_tagged_1_commits_dirty': 'v1.2.3.post1.dev0+gabc.dirty', 'branch_untagged_0_commits_clean': '0.post0.dev0+g', 'branch_untagged_0_commits_dirty': '0.post0.dev0+g.dirty', 'branch_untagged_1_commits_clean': '0.post1.dev0+gabc', 'branch_untagged_1_commits_dirty': '0.post1.dev0+gabc.dirty', 'error_getting_parts': 'unknown' } class Test_pep440_pre(unittest.TestCase, Testing_post_renderer_case_mixin): style = 'pep440-pre' expected = {'tagged_0_commits_clean': 'v1.2.3', 'tagged_0_commits_dirty': 'v1.2.3', 'tagged_1_commits_clean': 'v1.2.3.post0.dev1', 'tagged_1_commits_dirty': 'v1.2.3.post0.dev1', 'untagged_0_commits_clean': '0.post0.dev0', 'untagged_0_commits_dirty': '0.post0.dev0', 'untagged_1_commits_clean': '0.post0.dev1', 'untagged_1_commits_dirty': '0.post0.dev1', 'tagged_post_0_commits_clean': 'v1.2.3.post', 'tagged_post1_0_commits_clean': 'v1.2.3.post1', 'tagged_post_1_commits_clean': 'v1.2.3.post1.dev1', 'tagged_post1_1_commits_clean': 'v1.2.3.post2.dev1', 'tagged_post_0_commits_dirty': 'v1.2.3.post', 'tagged_post1_0_commits_dirty': 'v1.2.3.post1', 'tagged_post_1_commits_dirty': 'v1.2.3.post1.dev1', 'tagged_post1_1_commits_dirty': 'v1.2.3.post2.dev1', 'error_getting_parts': 
'unknown' } class Test_git_describe(unittest.TestCase, Testing_renderer_case_mixin): style = 'git-describe' expected = {'tagged_0_commits_clean': 'v1.2.3', 'tagged_0_commits_dirty': 'v1.2.3-dirty', 'tagged_1_commits_clean': 'v1.2.3-1-gabc', 'tagged_1_commits_dirty': 'v1.2.3-1-gabc-dirty', 'untagged_0_commits_clean': '', 'untagged_0_commits_dirty': '-dirty', 'untagged_1_commits_clean': 'abc', 'untagged_1_commits_dirty': 'abc-dirty', 'error_getting_parts': 'unknown' } if __name__ == '__main__': unittest.main() python-versioneer-0.29/tox.ini000066400000000000000000000027531445202303300164620ustar00rootroot00000000000000# Tox (https://tox.wiki/) is a tool for running tests # in multiple virtualenvs. This configuration file will run the # test suite on all supported python versions. To use it, "pip install tox" # and then run "tox" from this directory. [tox] envlist = py{37,38,39,310,311,py3}-{linux,windows} skip_missing_interpreters = True [flake8] max-line-length = 88 [gh-actions] python = 3.7: py37 3.8: py38 3.9: py39 3.10: py310 3.11: py311 pypy-3.9: pypy3 [gh-actions:env] PLATFORM = ubuntu-latest: linux windows-latest: windows [testenv] deps = flake8 flake8-docstrings wheel>=0.35 setuptools>=50 virtualenv>=20 packaging>=20 pip>=20 build tomli; python_version < "3.11" types-setuptools !pypy3: mypy commands = pip --version virtualenv --version # this creates versioneer.py in the current directory, which is used by # tests python setup.py make_versioneer python -m unittest discover test python test/git/test_git.py -v python test/git/test_invocations.py -v # this creates git_version.py, which is used by tests python setup.py make_long_version_py_git pyflakes setup.py versioneer.py git_version.py # pyflakes all of src except src/git/long_header.py and src/header.py python test/run_pyflakes_src.py pyflakes test flake8 git_version.py versioneer.py pycodestyle --max-line-length=88 git_version.py versioneer.py !pypy3: mypy git_version.py versioneer.py src/installer.py setup.py
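Editor's note: as a minimal sketch (not part of the archived sources) of the call pattern that test_render.py above exercises, the snippet below drives versioneer's render() directly: it takes a "pieces" dictionary of the shape built by define_pieces() plus a style name, and returns a dictionary whose "version" key holds the rendered string. It assumes a generated versioneer.py is importable from the working directory, e.g. after "python setup.py make_versioneer" as the tox.ini commands above arrange.

# Minimal sketch: call versioneer.render() the same way test_render.py does.
# Assumes versioneer.py has been generated into the working directory
# (e.g. via "python setup.py make_versioneer", as tox.ini does above).
from versioneer import render

# One tag, one commit past it, clean tree -- mirrors
# define_pieces('v1.2.3', distance=1) in the tests.
pieces = {
    "error": "",
    "closest-tag": "v1.2.3",
    "distance": 1,
    "dirty": False,
    "short": "abc",
    "long": "abcdefg",
    "date": "2016-05-31T13:02:11+0200",
    "branch": "master",
}

rendered = render(pieces, "pep440")
# Matches the 'tagged_1_commits_clean' expectation in Test_pep440.
assert rendered["version"] == "v1.2.3+1.gabc"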