././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1657371717.7424872 ecdsa-0.18.0/0000775005075200507520000000000014262276106012257 5ustar00hkariohkario././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1576509806.0 ecdsa-0.18.0/.coveragerc0000664005075200507520000000011313575720556014404 0ustar00hkariohkario# -*- conf -*- [run] include = src/ecdsa/* omit = src/ecdsa/_version.py ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1573067009.0 ecdsa-0.18.0/.gitattributes0000664005075200507520000000004313560614401015141 0ustar00hkariohkariosrc/ecdsa/_version.py export-subst ././@PaxHeader0000000000000000000000000000003300000000000010211 xustar0027 mtime=1657371717.700489 ecdsa-0.18.0/.github/0000775005075200507520000000000014262276106013617 5ustar00hkariohkario././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1657371717.7114885 ecdsa-0.18.0/.github/workflows/0000775005075200507520000000000014262276106015654 5ustar00hkariohkario././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1657282872.0 ecdsa-0.18.0/.github/workflows/ci.yml0000664005075200507520000004311414262020470016764 0ustar00hkariohkarioname: GitHub CI on: push: branches: - master pull_request: jobs: test: runs-on: ${{ matrix.os }} container: ${{ matrix.container }} strategy: fail-fast: false matrix: include: - name: py2.6 os: ubuntu-latest container: centos:6 python-version: 2.6 - name: py3.10 with ossl3.0 os: ubuntu-latest container: ubuntu:22.04 python-version: "3.10" tox-env: py310 - name: py2.7 os: ubuntu-18.04 python-version: 2.7 tox-env: py27 - name: py2.7 with old gmpy os: ubuntu-18.04 python-version: 2.7 tox-env: py27_old_gmpy - name: py2.7 with old gmpy2 os: ubuntu-18.04 python-version: 2.7 tox-env: py27_old_gmpy2 - name: py2.7 with old six os: ubuntu-18.04 python-version: 2.7 tox-env: py27_old_six - name: py2.7 with gmpy os: ubuntu-18.04 python-version: 2.7 tox-env: gmpypy27 - name: py2.7 with gmpy2 os: ubuntu-18.04 python-version: 2.7 tox-env: gmpy2py27 - name: py3.3 os: ubuntu-18.04 python-version: 3.3 tox-env: py33 - name: py3.4 os: ubuntu-18.04 python-version: 3.4 tox-env: py34 - name: py3.5 os: ubuntu-18.04 python-version: 3.5 tox-env: py35 - name: py3.6 os: ubuntu-18.04 python-version: 3.6 tox-env: py36 - name: py3.7 os: ubuntu-latest python-version: 3.7 tox-env: py37 - name: py3.8 os: ubuntu-latest python-version: 3.8 tox-env: py38 - name: py3.9 os: ubuntu-latest python-version: 3.9 tox-env: py39 - name: py3.10 os: ubuntu-latest python-version: '3.10' tox-env: py310 - name: py3.10 with gmpy os: ubuntu-latest python-version: '3.10' tox-env: gmpypy310 - name: py3.10 with gmpy2 os: ubuntu-latest python-version: '3.10' tox-env: gmpy2py310 - name: py3.11 os: ubuntu-latest python-version: '3.11.0-beta.3' tox-env: py311 - name: pypy os: ubuntu-latest python-version: pypy-2.7 tox-env: pypy - name: pypy3 os: ubuntu-latest python-version: pypy-3.7 tox-env: pypy3 # special configurations - name: py2.7 with instrumental os: ubuntu-18.04 python-version: 2.7 opt-deps: ['instrumental'] - name: code checks os: ubuntu-latest python-version: 3.9 tox-env: codechecks steps: - uses: actions/checkout@v2 if: ${{ !matrix.container }} with: fetch-depth: 50 - uses: actions/checkout@v1 # centos 6 doesn't have glibc new enough for the nodejs used by v2 if: ${{ matrix.container }} with: fetch-depth: 50 - name: Ensure dependencies on CentOS if: ${{ matrix.container == 'centos:6' }} run: | ls 
/etc/yum.repos.d/ cat /etc/yum.repos.d/CentOS-Base.repo rm /etc/yum.repos.d/CentOS-Base.repo cat > /etc/yum.repos.d/CentOS-Base.repo <> $GITHUB_ENV - name: Create condition coverage badge uses: schneegans/dynamic-badges-action@v1.4.0 if: ${{ contains(matrix.opt-deps, 'instrumental') && !github.event.pull_request }} with: auth: ${{ secrets.GIST_SECRET }} gistID: 9b6ca1f3410207fbeca785a178781651 filename: python-ecdsa-condition-coverage.json label: condition coverage message: ${{ env.COND_COV }}% valColorRange: ${{ env.COND_COV }} maxColorRange: 100 minColorRange: 0 - name: Publish coverage to Coveralls if: ${{ !matrix.opt-deps && matrix.tox-env != 'codechecks' }} env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} COVERALLS_FLAG_NAME: ${{ matrix.name }} COVERALLS_PARALLEL: true COVERALLS_SERVICE_NAME: github run: coveralls coveralls: name: Indicate completion to coveralls.io needs: test runs-on: ubuntu-latest container: python:3-slim steps: - name: Install coveralls run: | pip3 install --upgrade coveralls - name: Send "finished" signal to coveralls env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} COVERALLS_SERVICE_NAME: github run: | coveralls --finish ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1627555599.0 ecdsa-0.18.0/.gitignore0000664005075200507520000000067514100503417014244 0ustar00hkariohkario*.py[cod] MANIFEST htmlcov # C extensions *.so *.dylib # Packages *.egg *.egg-info dist build eggs parts bin var sdist develop-eggs .installed.cfg lib lib64 __pycache__ # Installer logs pip-log.txt # Other logs *.log # Unit test / coverage reports .coverage coverage-html .tox nosetests.xml t/ .hypothesis/ # Translations *.mo # Mr Developer .mr.developer.cfg .project .pydevproject #vscode .vscode # Backup files *.swp *~ .idea .cache ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1630493167.0 ecdsa-0.18.0/.gitleaks.toml0000664005075200507520000000014014113654757015037 0ustar00hkariohkario[allowlist] description = "Ignore private keys in test files" files = [ '''test_.*''' ] ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1649087543.0 ecdsa-0.18.0/.readthedocs.yaml0000664005075200507520000000131414222612067015501 0ustar00hkariohkario# .readthedocs.yaml # Read the Docs configuration file # See https://docs.readthedocs.io/en/stable/config-file/v2.html for details # Required version: 2 # Set the version of Python and other tools you might need build: os: ubuntu-20.04 tools: python: "3.9" # You can also specify other tool versions: # nodejs: "16" # rust: "1.55" # golang: "1.17" # Build documentation in the docs/ directory with Sphinx sphinx: configuration: docs/source/conf.py # If using Sphinx, optionally build your docs in additional formats such as PDF # formats: # - pdf # Optionally declare the Python requirements required to build your docs python: install: - requirements: docs/requirements.txt ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1611246732.0 ecdsa-0.18.0/.travis.yml0000664005075200507520000001226414002326214014361 0ustar00hkariohkario# workaround for 3.7 not available in default configuration # travis-ci/travis-ci#9815 dist: trusty sudo: false language: python cache: pip addons: apt_packages: # needed for gmpy and gmpy2 - libgmp-dev - libmpfr-dev - libmpc-dev before_cache: - rm -f $HOME/.cache/pip/log/debug.log # place the slowest (instrumental and py2.6) first matrix: include: - python: 2.7 env: INSTRUMENTAL=yes dist: bionic sudo: true - python: 2.6 env: 
TOX_ENV=py26 - python: 2.7 env: TOX_ENV=py27 - python: 2.7 env: TOX_ENV=py27_old_gmpy - python: 2.7 env: TOX_ENV=py27_old_gmpy2 - python: 2.7 env: TOX_ENV=py27_old_six - python: 2.7 env: TOX_ENV=gmpypy27 - python: 2.7 env: TOX_ENV=gmpy2py27 - python: 3.3 env: TOX_ENV=py33 - python: 3.4 env: TOX_ENV=py34 - python: 3.5 env: TOX_ENV=py35 - python: 3.6 env: TOX_ENV=py36 - python: 3.7 env: TOX_ENV=py37 dist: bionic sudo: true - python: 3.8 env: TOX_ENV=py38 dist: bionic sudo: true - python: 3.9 env: TOX_ENV=codechecks dist: bionic sudo: true - python: 3.9 env: TOX_ENV=py39 dist: bionic sudo: true - python: 3.9 env: TOX_ENV=gmpypy39 dist: bionic sudo: true - python: 3.9 env: TOX_ENV=gmpy2py39 dist: bionic sudo: true - python: nightly env: TOX_ENV=py dist: bionic sudo: true - python: pypy env: TOX_ENV=pypy - python: pypy3 env: TOX_ENV=pypy3 # We use explicit version as the PATH needs major-minor part - name: "Python3.8.0 on Windows" os: windows language: shell before_install: - choco install python --version 3.8.0 - python -m pip install --upgrade pip env: PATH=/c/Python38:/c/Python38/Scripts:$PATH install: - pip list - pip install six - pip install -r build-requirements.txt - pip list script: - coverage run --branch -m pytest src/ecdsa after_success: - coveralls allow_failures: - python: nightly # for instrumental we're checking if the coverage changed from base branch # so collect that info before_install: - | echo -e "TRAVIS_PULL_REQUEST=$TRAVIS_PULL_REQUEST\n" \ "TRAVIS_REPO_SLUG=$TRAVIS_REPO_SLUG\n" \ "TRAVIS_PULL_REQUEST=$TRAVIS_PULL_REQUEST\n" \ "TRAVIS_COMMIT=$TRAVIS_COMMIT\n" \ "TRAVIS_PYTHON_VERSION=$TRAVIS_PYTHON_VERSION" - | # workaround https://github.com/travis-ci/travis-ci/issues/2666 if [ "$TRAVIS_PULL_REQUEST" != "false" ]; then URL="https://github.com/${TRAVIS_REPO_SLUG}/pull/${TRAVIS_PULL_REQUEST}.patch" # `--location` makes curl follow redirects PR_FIRST=$(curl --silent --show-error --location $URL | head -1 | grep -o -E '\b[0-9a-f]{40}\b' | tr -d '\n') TRAVIS_COMMIT_RANGE=$PR_FIRST^..$TRAVIS_COMMIT fi # sanity check current commit - BRANCH=$(git rev-parse HEAD) - echo "TRAVIS_COMMIT_RANGE=$TRAVIS_COMMIT_RANGE" - git fetch origin master:refs/remotes/origin/master install: - pip list - | if [[ -e build-requirements-${TRAVIS_PYTHON_VERSION}.txt ]]; then travis_retry pip install -r build-requirements-${TRAVIS_PYTHON_VERSION}.txt; else travis_retry pip install -r build-requirements.txt; fi - if [[ $TOX_ENV =~ gmpy2 ]] || [[ $INSTRUMENTAL ]]; then travis_retry pip install gmpy2; fi - if [[ $TOX_ENV =~ gmpyp ]]; then travis_retry pip install gmpy; fi - if [[ $INSTRUMENTAL ]]; then travis_retry pip install instrumental; fi - pip list script: - if [[ $TOX_ENV ]]; then tox -e $TOX_ENV; fi - if [[ $TOX_ENV =~ gmpy2 ]]; then tox -e speedgmpy2; fi - if [[ $TOX_ENV =~ gmpyp ]]; then tox -e speedgmpy; fi - if ! 
[[ $TOX_ENV =~ gmpy ]]; then tox -e speed; fi - | if [[ $INSTRUMENTAL && $TRAVIS_PULL_REQUEST != "false" ]]; then git checkout $PR_FIRST^ instrumental -t ecdsa -i 'test.*|.*_version|.*_compat' `which pytest` src/ecdsa/test*.py instrumental -f .instrumental.cov -s instrumental -f .instrumental.cov -s | python diff-instrumental.py --save .diff-instrumental git checkout $BRANCH instrumental -t ecdsa -i 'test.*|.*_version|.*_compat' `which pytest` src/ecdsa/test*.py instrumental -f .instrumental.cov -sr fi - | if [[ $INSTRUMENTAL && $TRAVIS_PULL_REQUEST == "false" ]]; then instrumental -t ecdsa -i 'test.*|.*_version|.*_compat' `which pytest` src/ecdsa instrumental -f .instrumental.cov -s # just log the values when merging instrumental -f .instrumental.cov -s | python diff-instrumental.py fi - | if [[ $INSTRUMENTAL && $TRAVIS_PULL_REQUEST != "false" ]]; then instrumental -f .instrumental.cov -s | python diff-instrumental.py --read .diff-instrumental --fail-under 70 --max-difference -0.1 fi after_success: - if [[ -z $INSTRUMENTAL ]]; then coveralls; fi ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1421794824.0 ecdsa-0.18.0/LICENSE0000664005075200507520000000217312457557010013267 0ustar00hkariohkario"python-ecdsa" Copyright (c) 2010 Brian Warner Portions written in 2005 by Peter Pearson and placed in the public domain. Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1573067009.0 ecdsa-0.18.0/MANIFEST.in0000664005075200507520000000015013560614401014003 0ustar00hkariohkario# basic metadata include MANIFEST.in LICENSE NEWS README.md versioneer.py include src/ecdsa/_version.py ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1657371671.0 ecdsa-0.18.0/NEWS0000664005075200507520000003670714262276027012775 0ustar00hkariohkario* Release 0.18.0 (09 Jul 2022) New API: * `curve_by_name` in `curves` module to get a `Curve` object by providing curve name. Bug fix: * Make the `VerifyingKey` encoded with explicit parameters use the same kind of point encoding for public key and curve generator. * Better handling of malformed curve parameters (as in CVE-2022-0778); make python-ecdsa raise `MalformedPointError` instead of `AssertionError`. Doc fix: * Publish the documentation on https://ecdsa.readthedocs.io/, include explanation of basics of handling of ECC data formats and how to use the library for elliptic curve arithmetic. 
* Make object names more consistent, make them into hyperlinks on the readthedocs documentation. * Make security note more explicit (Ian Rodney) * Fix the `explicit` vs `named_curve` confusion in `VerifyingKey` docs. Maintenance: * Updated black version; slight changes to formatting * Include interoperability tests for Ed25519 and Ed448 with OpenSSL. * Release 0.18.0-beta2 (05 Jan 2022) New features: * Support for point precomputation for EdDSA. Maintenance: * Fix few typos (thanks to Tim Gates and Kian Meng Ang). Bug fix: * Accept private EdDSA keys that include public key in the ASN.1 structure. * Fix incompatibility with Python 3.3 in handling of memoryviews of empty strings. * Release 0.18.0-beta1 (03 Aug 2021) New features: * Support for EdDSA (Ed25519, Ed448) signature creation and verification. * Support for Ed25519 and Ed448 in PKCS#8 and public key files. New API: * CurveEdTw class to represent the Twisted Edwards curve parameters. * PointEdwards class to represent points on Twisted Edwards curve and provide point arithmetic on it. * Release 0.17.0 (27 May 2021) New API: * Keys that use explicit curve parameters can now be read and written. Reading of explicit curves can be disabled by using the `valid_curve_encodings` keyword argument in `VerifyingKey.from_pem()`, `VerifyingKey.from_der()`, `SigningKey.from_pem()`, and `SigningKey.from_der()`. * Keys can now be written with use of explicit curve parameters, use `curve_parameters_encoding` keyword argument of `VerifyingKey.to_pem()`, `VerifyingKey.to_der()`, `SigningKey.to_pem(), or `SigningKey.to_der()` to specify the format. By default `named_curve` will be used, unless the curve doesn't have an associated OID (as will be the case for an unsupported curve), then `explicit` encoding will be used. * Allow specifying acceptable point formats when loading public keys (this also fixes a minor bug where python-ecdsa would accept raw encoding for points in PKCS#8 files). Set of accepted encodings is controlled by `valid_encodings` keyword argument in `ECDH.load_received_public_key_bytes()`, `VerifyingKey.from_string()`, `VerifyingKey.from_pem()`, VerifyingKey.from_der()`. * `PointJacobi` and `Point` now inherit from `AbstractPoint` that implements the methods for parsing points. That added `from_bytes()` and `to_bytes()` methods to both of them. * Curve parameters can now be read and written to PEM and DER files. The `Curve` class supports new `to_der()`, `from_der()`, `to_pem()`, and `from_pem()` methods. Doc fix: * Describe in detail which methods can raise `RSZeroError`, and that `SigningKey.sign_deterministic()` won't raise it. Bug fix: * Correctly truncate hash values larger than the curve order (only impacted custom curves and the curves added in this release). * Correctly handle curves for which the order is larger than the prime (only impacted custom curves and the secp160r1 curve added in this release). * Fix the handling of `==` and `!=` for `Public_key`, `Private_key`, `Point`, `PointJacobi`, `VerifyingKey`, and `SigningKey` so that it behaves consistently and in the expected way both in Python 2 and Python 3. * Implement lock-less algorithm inside PointJacobi for keeping shared state so that when a calculation is aborted with KeyboardInterrupt, the state doesn't become corrupted (this fixes the occasional breakage of ecdsa in interactive shells). New features: * The `speed.py` script now provides performance for signature verification without the use of precomputation. 
* New curves supported: secp112r1, secp112r2, secp128r1, secp160r1. Performance: * Use 2-ary Non-Adjacent Form for the combined multiply-add. This speeds up single-shot verify (i.e. without precomputation) by about 4 to 5%. * Use native Python 3.8 support for calculating multiplicative inverses. Maintenance: * Include Python 3.9 in PyPI keywords. * More realistic branch coverage counting (ignore Python version-specific branches). * Additional test coverage to many parts of the library. * Migrate to Github Actions for Continuous Testing. * Release 0.16.1 (12 Nov 2020) New API: `VerifyingKey.precompute()` supports `lazy` argument to delay precomputation to the first time the key is used to verify a signature. Doc fixes: Documentation for the `VerifyingKey.precompute()` method. Bug fix: Make created signatures correct when the hash used is bigger than the curve order bit size and the curve order is not a multiple of 8 (this affects only users of custom curves or hashes with output larger than 512 bits). Performance: Speed up library load time by calculating the generator point multiplication tables the first time the points are used, not when they are initialised. Maintenance: Include Python 3.9 in CI testing. Test coverage for the `VerifyingKey.precompute()` method. Small speed-ups for the test suite. * Release 0.16.0 (27 Aug 2020) New features: Support for reading and writing private keys in PKCS#8 format. New API: `to_pem` and `to_der` now accept new parameter, `format`, to specify the format of the encoded files, either the default, legacy `ssleay`, or the new `pkcs8` to use PKCS#8. Note that only unencrypted PKCS#8 files are supported. Add `allow_truncate` to `verify` in `VerifyingKey`, it defaults to True, when specified as False, the use of large hashes smaller than curves will be disallowed (as it was in 0.14.1 and earlier). Bug fix: Correctly calculate signatures for private keys equal to n-1. Make `PointJacobi` and thus `SigningKey` and `VerifyingKey` pickleable. Doc fixes: `to_pem` functions return `bytes` not `str`, document them as such. `from_pem` and `from_pem` in `SigningKey` returns `SigningKey`, document them as such. Maintenance: Ensure that version checks will work with Python 4. Format the source with black. Fix uses of `assert_` in the test suite. Use newer Ubuntu in Travis to test against OpenSSL 1.1.1 (and thus test the interoperability of ECDH code in Travis). * Release 0.15 (02 Jan 2020) Bug fixes: `from curves import *` will now correctly import `BRAINPOOLP256r1` and `BRAINPOOLP320r1` curves. New features: ECDH operations have a public explicit API. Large hashes are now supported with small curves (e.g. SHA-256 can be used with NIST192p). `VerifyingKey` now supports the `precompute()` method to further speed up signature verification with the given instance of the key. New API: `VerifyingKey`, `SigningKey`, `Public_key`, `Private_key` and `CurveFp` now have `__eq__` methods. `ecdsa.ecdh` module and `ECDH` class. `PointJacobi` added. `VerifyingKey.verify_digest`, `SigningKey.sign_digest` and `SigningKey.sign_digest_deterministic` methods now accept the `allow_truncate` argument to enable the use of hashes larger than the curve order. `VerifyingKey` `from_pem` and `from_der` now accept `hashfunc` parameter like other `from*` methods. `VerifyingKey` has the `precompute` method now. `VerifyingKey.from_public_point` may now not perform validation of public point when `validate_point=False` argument is passed to the method. 
`CurveFp` constructor now accepts the `h` parameter - the cofactor of the elliptic curve, it's used for the selection of algorithm of public point verification. Performance: `randrange` now will perform much fewer calls to the system random number generator. `PointJacobi` introduced and used as the underlying implementation; speeds up the library by a factor of about 20. The library has now optional dependencies on `gmpy` and `gmpy2`. When they are available, the elliptic curve calculations will be about 3 times faster. Maintenance: expected minimum version of `six` module (1.9.0) is now specified explicitly in `setup.py` and tested against. Significantly faster test suite execution. * Release 0.14.1 (06 Nov 2019) Remove the obsolete `six.py` file from wheel * Release 0.14 (06 Nov 2019) Bug fixes: Strict checking of DER requirements when parsing SEQUENCE, INTEGER, OBJECT IDENTIFIER and BITSTRING objects. DER parsers now consistently raise `UnexpectedDER` exception on malformed DER encoded byte strings. Make sure that both malformed and invalid signatures raise `BadSignatureError`. Ensure that all `SigningKey` and `VerifyingKey` methods that should accept bytes-like objects actually do accept them (also avoid copying input strings). Make `SigningKey.sign_digest_deterministic` use default object hashfunc when none was provided. `encode_integer` now works for large integers. Make `encode_oid` and `remove_object` correctly handle OBJECT IDENTIFIERs with large second sub-identifier and padding in encoded sub-identifiers. New features: Deterministic signature methods now accept the `extra_entropy` parameter to further randomise the selection of `k` (the nonce) for signature, as specified in RFC6979. Recovery of the public key from signature is now supported. Support for SEC1/X9.62 formatted keys, all three encodings are supported: "uncompressed", "compressed" and "hybrid". Both string, and PEM/DER will automatically accept them, if the size of the key matches the curve. Benchmarking application now provides performance numbers that are easier to compare against OpenSSL. Support for all Brainpool curves (non-twisted). New API: `CurveFp`: `__str__` is now supported. `SigningKey.sign_deterministic`, `SigningKey.sign_digest_deterministic` and `generate_k`: extra_entropy parameter was added `Signature.recover_public_keys` was added `VerifyingKey.from_public_key_recovery` and `VerifyingKey.from_public_key_recovery_with_digest` were added `VerifyingKey.to_string`: `encoding` parameter was added `VerifyingKey.to_der` and `SigningKey.to_der`: `point_encoding` parameter was added. `encode_bitstring`: `unused` parameter was added `remove_bitstring`: `expect_unused` parameter was added `SECP256k1` is now part of `curves` `*` import `Curves`: `__repr__` is now supported `VerifyingKey`: `__repr__` is now supported Deprecations: Python 2.5 is not supported anymore - dead code removal. `from ecdsa.keys import *` will now import only objects defined in that module. Trying to decode a malformed point using `VerifyingKey.from_string` will rise now the `MalformedPointError` exception (that inherits from `AssertionError` but is not it). Multiple functions in `numbertheory` are considered deprecated: `phi`, `carmichael`, `carmichael_of_factorized`, `carmichael_of_ppower`, `order_mod`, `largest_factor_relatively_prime`, `kinda_order_mod`. They will now emit `DeprecationWarning` when used. 
Run the application or test suite with `-Wd` option or with `PYTHONWARNINGS=default` environment variable to verify if those methods are not used. They will be removed completely in a future release. `encode_bitstring` and `decode_bitstring` expect the number of unused bits to be passed as an argument now. They will emit `DeprecationWarning` if they are used in the deprecated way. modular_exp: will emit `DeprecationWarning` Hardening: Deterministic signatures now verify that the signature won't leak private key through a very unlikely selection of `k` value (the nonce). Nonce bit size hiding was added (hardening against Minerva attack). Please note that it DOES NOT make the library secure against side-channel attacks (timing attacks). Performance: The public key in key generation is not verified twice now, making key generation and private key reading about 33% faster. Microoptimisation to `inverse_mod` function, increasing performance by about 40% for all operations. Maintenance: Extended test coverage to newer python versions. Fixes to examples in README.md: correct commands, more correct code (now works on Python 3). Stopped bundling `six` Moved sources into `src` subdirectory Made benchmarking script standalone (runnable either with `tox -e speed`, or after installation, with `python speed.py`) Now test coverage reported to coveralls is branch coverage, not line coverage Autodetection of curves supported by OpenSSL (test suite compatibility with Fedora OpenSSL package). More readable error messages (exceptions) in `der` module. Documentation to `VerifyingKey`, `SigningKey` and signature encoder/decoder functions added. Added measuring and verifying condition coverage to Continuous Integration. Big clean-up of the test suite, use pytest parametrisation and hypothesis for better test coverage and more precise failure reporting. Use platform-provided `math.gcd`, when provided. * Release 0.13.3 (07 Oct 2019) Fix CVE-2019-14853 - possible DoS caused by malformed signature decoding and signature malleability. Also harden key decoding from string and DER encodings. * Release 0.13.2 (17 Apr 2019) Restore compatibility of setup.py with Python 2.6 and 2.7. * Release 0.13.1 (17 Apr 2019) Fix the PyPI wheel - the old version included .pyc files. * Release 0.13 (07 Feb 2015) Fix the argument order for Curve constructor (put openssl_name= at the end, with a default value) to unbreak compatibility with external callers who used the 0.11 convention. * Release 0.12 (06 Feb 2015) Switch to Versioneer for version-string management (fixing the broken `ecdsa.__version__` attribute). Add Curve.openssl_name property. Mention secp256k1 in README, test against OpenSSL. Produce "wheel" distributions. Add py3.4 and pypy3 compatibility testing. Other minor fixes. * Release 0.11 (10 Mar 2014) Add signature-encoding functions "sigencode_{strings,string,der}_canonize" which canonicalize the S value (using the smaller of the two possible values). Add "validate_point=" argument to VerifyingKey.from_string() constructor (defaults to True) which can be used to disable time-consuming point validation when importing a pre-validated verifying key. Drop python2.5 support (untested but not explicitly broken yet), update trove classifiers. * Release 0.10 (23 Oct 2013) Make the secp256k1 available in __init__.py too (thanks to Scott Bannert). * Release 0.9 (01 Oct 2013) Add secp256k1 curve (thanks to Benjamin Dauvergne). Add deterministic (no entropy needed) signatures (thanks to slush). 
Added py3.2/py3.3 compatibility (thanks to Elizabeth Myers). * Release 0.8 (04 Oct 2011) Small API addition: accept a hashfunc= argument in the constructors for SigningKey and VerifyingKey. This makes it easier to write wrappers that e.g. use NIST256p and SHA256 without their obligating callers to pass hashfunc=sha256 in each time they call sign() or verify(). * Release 0.7 (28 Nov 2010) Fix test failure against OpenSSL-1.0.0 (previous versions only worked against openssl-0.9.8 or earlier). Increase python requirement to py2.5 or later (still no py3 compatibility, but work is underway). Replace the use of obsolete 'sha' library with modern 'hashlib'. Clean up unit test runner (stop using subprocesses). * Release 0.6 (15 Oct 2010) Small packaging changes: extract the version number from git, add 'setup.py test' command, set exit code correctly on test failure. Fix pyflakes warnings. * Release 0.5 (27 Apr 2010) Initial release. EC-DSA signature for five NIST "Suite B" GF(p) curves: prime192v1, secp224r1, prime256v1, secp384r1, and secp521r1. DER/PEM input/output functions, seed-to-randrange helper functions. ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1657371717.7424872 ecdsa-0.18.0/PKG-INFO0000664005075200507520000010373614262276106013366 0ustar00hkariohkarioMetadata-Version: 2.1 Name: ecdsa Version: 0.18.0 Summary: ECDSA cryptographic signature library (pure python) Home-page: http://github.com/tlsfuzzer/python-ecdsa Author: Brian Warner Author-email: warner@lothar.com License: MIT Description: # Pure-Python ECDSA and ECDH [![Build Status](https://github.com/tlsfuzzer/python-ecdsa/workflows/GitHub%20CI/badge.svg?branch=master)](https://github.com/tlsfuzzer/python-ecdsa/actions?query=workflow%3A%22GitHub+CI%22+branch%3Amaster) [![Documentation Status](https://readthedocs.org/projects/ecdsa/badge/?version=latest)](https://ecdsa.readthedocs.io/en/latest/?badge=latest) [![Coverage Status](https://coveralls.io/repos/github/tlsfuzzer/python-ecdsa/badge.svg?branch=master)](https://coveralls.io/github/tlsfuzzer/python-ecdsa?branch=master) ![condition coverage](https://img.shields.io/endpoint?url=https://gist.githubusercontent.com/tomato42/9b6ca1f3410207fbeca785a178781651/raw/python-ecdsa-condition-coverage.json) [![Language grade: Python](https://img.shields.io/lgtm/grade/python/g/tlsfuzzer/python-ecdsa.svg?logo=lgtm&logoWidth=18)](https://lgtm.com/projects/g/tlsfuzzer/python-ecdsa/context:python) [![Total alerts](https://img.shields.io/lgtm/alerts/g/tlsfuzzer/python-ecdsa.svg?logo=lgtm&logoWidth=18)](https://lgtm.com/projects/g/tlsfuzzer/python-ecdsa/alerts/) [![Latest Version](https://img.shields.io/pypi/v/ecdsa.svg?style=flat)](https://pypi.python.org/pypi/ecdsa/) ![Code style: black](https://img.shields.io/badge/code%20style-black-000000.svg?style=flat) This is an easy-to-use implementation of ECC (Elliptic Curve Cryptography) with support for ECDSA (Elliptic Curve Digital Signature Algorithm), EdDSA (Edwards-curve Digital Signature Algorithm) and ECDH (Elliptic Curve Diffie-Hellman), implemented purely in Python, released under the MIT license. With this library, you can quickly create key pairs (signing key and verifying key), sign messages, and verify the signatures. You can also agree on a shared secret key based on exchanged public keys. The keys and signatures are very short, making them easy to handle and incorporate into other protocols. 
**NOTE: This library should not be used in production settings, see [Security](#Security) for more details.**

## Features

This library provides key generation, signing, verifying, and shared secret
derivation for five popular NIST "Suite B" GF(p) (_prime field_) curves, with
key lengths of 192, 224, 256, 384, and 521 bits. The "short names" for these
curves, as known by the OpenSSL tool (`openssl ecparam -list_curves`), are:
`prime192v1`, `secp224r1`, `prime256v1`, `secp384r1`, and `secp521r1`. It
includes the 256-bit curve `secp256k1` used by Bitcoin. There is also support
for the regular (non-twisted) variants of Brainpool curves from 160 to 512
bits. The "short names" of those curves are: `brainpoolP160r1`,
`brainpoolP192r1`, `brainpoolP224r1`, `brainpoolP256r1`, `brainpoolP320r1`,
`brainpoolP384r1`, `brainpoolP512r1`. A few of the small curves from the SEC
standard are also included (mainly to speed up testing of the library); those
are: `secp112r1`, `secp112r2`, `secp128r1`, and `secp160r1`. Key generation,
signing, and verifying are also supported for the Ed25519 and Ed448 curves.
No other curves are included, but it is not too hard to add support for more
curves over prime fields.

## Dependencies

This library uses only Python and the 'six' package. It is compatible with
Python 2.6, 2.7, and 3.3+. It also supports execution on alternative
implementations like pypy and pypy3.

If `gmpy2` or `gmpy` is installed, they will be used for faster arithmetic.
Either of them can be installed after this library is installed;
`python-ecdsa` will detect their presence on start-up and use them
automatically. You should prefer `gmpy2` on Python 3 for optimal performance.

To run the OpenSSL compatibility tests, the 'openssl' tool must be in your
`PATH`. This release has been tested successfully against OpenSSL 0.9.8o,
1.0.0a, 1.0.2f, 1.1.1d and 3.0.1 (among others).

## Installation

This library is available on PyPI; it's recommended to install it using `pip`:

```
pip install ecdsa
```

If higher performance is wanted and using native code is not a problem, it's
possible to install it together with `gmpy2`:

```
pip install ecdsa[gmpy2]
```

or (slower, legacy option):

```
pip install ecdsa[gmpy]
```

## Speed

The following table shows how long this library takes to generate key pairs
(`keygen`), to sign data (`sign`), to verify those signatures (`verify`), to
derive a shared secret (`ecdh`), and to verify the signatures with no
key-specific precomputation (`no PC verify`). All those values are in seconds.
For convenience, the inverses of those values are also provided: how many keys
per second can be generated (`keygen/s`), how many signatures can be made per
second (`sign/s`), how many signatures can be verified per second (`verify/s`),
how many shared secrets can be derived per second (`ecdh/s`), and how many
signatures with no key-specific precomputation can be verified per second
(`no PC verify/s`). The size of a raw signature (generally the smallest way a
signature can be encoded) is also provided in the `siglen` column. Use
`tox -e speed` to generate this table on your own computer.
On an Intel Core i7 4790K @ 4.0GHz I'm getting the following performance: ``` siglen keygen keygen/s sign sign/s verify verify/s no PC verify no PC verify/s NIST192p: 48 0.00032s 3134.06 0.00033s 2985.53 0.00063s 1598.36 0.00129s 774.43 NIST224p: 56 0.00040s 2469.24 0.00042s 2367.88 0.00081s 1233.41 0.00170s 586.66 NIST256p: 64 0.00051s 1952.73 0.00054s 1867.80 0.00098s 1021.86 0.00212s 471.27 NIST384p: 96 0.00107s 935.92 0.00111s 904.23 0.00203s 491.77 0.00446s 224.00 NIST521p: 132 0.00210s 475.52 0.00215s 464.16 0.00398s 251.28 0.00874s 114.39 SECP256k1: 64 0.00052s 1921.54 0.00054s 1847.49 0.00105s 948.68 0.00210s 477.01 BRAINPOOLP160r1: 40 0.00025s 4003.88 0.00026s 3845.12 0.00053s 1893.93 0.00105s 949.92 BRAINPOOLP192r1: 48 0.00033s 3043.97 0.00034s 2975.98 0.00063s 1581.50 0.00135s 742.29 BRAINPOOLP224r1: 56 0.00041s 2436.44 0.00043s 2315.51 0.00078s 1278.49 0.00180s 556.16 BRAINPOOLP256r1: 64 0.00053s 1892.49 0.00054s 1846.24 0.00114s 875.64 0.00229s 437.25 BRAINPOOLP320r1: 80 0.00073s 1361.26 0.00076s 1309.25 0.00143s 699.29 0.00322s 310.49 BRAINPOOLP384r1: 96 0.00107s 931.29 0.00111s 901.80 0.00230s 434.19 0.00476s 210.20 BRAINPOOLP512r1: 128 0.00207s 483.41 0.00212s 471.42 0.00425s 235.43 0.00912s 109.61 SECP112r1: 28 0.00015s 6672.53 0.00016s 6440.34 0.00031s 3265.41 0.00056s 1774.20 SECP112r2: 28 0.00015s 6697.11 0.00015s 6479.98 0.00028s 3524.72 0.00058s 1716.16 SECP128r1: 32 0.00018s 5497.65 0.00019s 5272.89 0.00036s 2747.39 0.00072s 1396.16 SECP160r1: 42 0.00025s 3949.32 0.00026s 3894.45 0.00046s 2153.85 0.00102s 985.07 Ed25519: 64 0.00076s 1324.48 0.00042s 2405.01 0.00109s 918.05 0.00344s 290.50 Ed448: 114 0.00176s 569.53 0.00115s 870.94 0.00282s 355.04 0.01024s 97.69 ecdh ecdh/s NIST192p: 0.00104s 964.89 NIST224p: 0.00134s 748.63 NIST256p: 0.00170s 587.08 NIST384p: 0.00352s 283.90 NIST521p: 0.00717s 139.51 SECP256k1: 0.00154s 648.40 BRAINPOOLP160r1: 0.00082s 1220.70 BRAINPOOLP192r1: 0.00105s 956.75 BRAINPOOLP224r1: 0.00136s 734.52 BRAINPOOLP256r1: 0.00178s 563.32 BRAINPOOLP320r1: 0.00252s 397.23 BRAINPOOLP384r1: 0.00376s 266.27 BRAINPOOLP512r1: 0.00733s 136.35 SECP112r1: 0.00046s 2180.40 SECP112r2: 0.00045s 2229.14 SECP128r1: 0.00054s 1868.15 SECP160r1: 0.00080s 1243.98 ``` To test performance with `gmpy2` loaded, use `tox -e speedgmpy2`. 
On the same machine I'm getting the following performance with `gmpy2`: ``` siglen keygen keygen/s sign sign/s verify verify/s no PC verify no PC verify/s NIST192p: 48 0.00017s 5933.40 0.00017s 5751.70 0.00032s 3125.28 0.00067s 1502.41 NIST224p: 56 0.00021s 4782.87 0.00022s 4610.05 0.00040s 2487.04 0.00089s 1126.90 NIST256p: 64 0.00023s 4263.98 0.00024s 4125.16 0.00045s 2200.88 0.00098s 1016.82 NIST384p: 96 0.00041s 2449.54 0.00042s 2399.96 0.00083s 1210.57 0.00172s 581.43 NIST521p: 132 0.00071s 1416.07 0.00072s 1389.81 0.00144s 692.93 0.00312s 320.40 SECP256k1: 64 0.00024s 4245.05 0.00024s 4122.09 0.00045s 2206.40 0.00094s 1068.32 BRAINPOOLP160r1: 40 0.00014s 6939.17 0.00015s 6681.55 0.00029s 3452.43 0.00057s 1769.81 BRAINPOOLP192r1: 48 0.00017s 5920.05 0.00017s 5774.36 0.00034s 2979.00 0.00069s 1453.19 BRAINPOOLP224r1: 56 0.00021s 4732.12 0.00022s 4622.65 0.00041s 2422.47 0.00087s 1149.87 BRAINPOOLP256r1: 64 0.00024s 4233.02 0.00024s 4115.20 0.00047s 2143.27 0.00098s 1015.60 BRAINPOOLP320r1: 80 0.00032s 3162.38 0.00032s 3077.62 0.00063s 1598.83 0.00136s 737.34 BRAINPOOLP384r1: 96 0.00041s 2436.88 0.00042s 2395.62 0.00083s 1202.68 0.00178s 562.85 BRAINPOOLP512r1: 128 0.00063s 1587.60 0.00064s 1558.83 0.00125s 799.96 0.00281s 355.83 SECP112r1: 28 0.00009s 11118.66 0.00009s 10775.48 0.00018s 5456.00 0.00033s 3020.83 SECP112r2: 28 0.00009s 11322.97 0.00009s 10857.71 0.00017s 5748.77 0.00032s 3094.28 SECP128r1: 32 0.00010s 10078.39 0.00010s 9665.27 0.00019s 5200.58 0.00036s 2760.88 SECP160r1: 42 0.00015s 6875.51 0.00015s 6647.35 0.00029s 3422.41 0.00057s 1768.35 Ed25519: 64 0.00030s 3322.56 0.00018s 5568.63 0.00046s 2165.35 0.00153s 654.02 Ed448: 114 0.00060s 1680.53 0.00039s 2567.40 0.00096s 1036.67 0.00350s 285.62 ecdh ecdh/s NIST192p: 0.00050s 1985.70 NIST224p: 0.00066s 1524.16 NIST256p: 0.00071s 1413.07 NIST384p: 0.00127s 788.89 NIST521p: 0.00230s 434.85 SECP256k1: 0.00071s 1409.95 BRAINPOOLP160r1: 0.00042s 2374.65 BRAINPOOLP192r1: 0.00051s 1960.01 BRAINPOOLP224r1: 0.00066s 1518.37 BRAINPOOLP256r1: 0.00071s 1399.90 BRAINPOOLP320r1: 0.00100s 997.21 BRAINPOOLP384r1: 0.00129s 777.51 BRAINPOOLP512r1: 0.00210s 475.99 SECP112r1: 0.00022s 4457.70 SECP112r2: 0.00024s 4252.33 SECP128r1: 0.00028s 3589.31 SECP160r1: 0.00043s 2305.02 ``` (there's also `gmpy` version, execute it using `tox -e speedgmpy`) For comparison, a highly optimised implementation (including curve-specific assembly for some curves), like the one in OpenSSL 1.1.1d, provides the following performance numbers on the same machine. 
Run `openssl speed ecdsa` and `openssl speed ecdh` to reproduce it: ``` sign verify sign/s verify/s 192 bits ecdsa (nistp192) 0.0002s 0.0002s 4785.6 5380.7 224 bits ecdsa (nistp224) 0.0000s 0.0001s 22475.6 9822.0 256 bits ecdsa (nistp256) 0.0000s 0.0001s 45069.6 14166.6 384 bits ecdsa (nistp384) 0.0008s 0.0006s 1265.6 1648.1 521 bits ecdsa (nistp521) 0.0003s 0.0005s 3753.1 1819.5 256 bits ecdsa (brainpoolP256r1) 0.0003s 0.0003s 2983.5 3333.2 384 bits ecdsa (brainpoolP384r1) 0.0008s 0.0007s 1258.8 1528.1 512 bits ecdsa (brainpoolP512r1) 0.0015s 0.0012s 675.1 860.1 sign verify sign/s verify/s 253 bits EdDSA (Ed25519) 0.0000s 0.0001s 28217.9 10897.7 456 bits EdDSA (Ed448) 0.0003s 0.0005s 3926.5 2147.7 op op/s 192 bits ecdh (nistp192) 0.0002s 4853.4 224 bits ecdh (nistp224) 0.0001s 15252.1 256 bits ecdh (nistp256) 0.0001s 18436.3 384 bits ecdh (nistp384) 0.0008s 1292.7 521 bits ecdh (nistp521) 0.0003s 2884.7 256 bits ecdh (brainpoolP256r1) 0.0003s 3066.5 384 bits ecdh (brainpoolP384r1) 0.0008s 1298.0 512 bits ecdh (brainpoolP512r1) 0.0014s 694.8 ``` Keys and signature can be serialized in different ways (see Usage, below). For a NIST192p key, the three basic representations require strings of the following lengths (in bytes): to_string: signkey= 24, verifykey= 48, signature=48 compressed: signkey=n/a, verifykey= 25, signature=n/a DER: signkey=106, verifykey= 80, signature=55 PEM: signkey=278, verifykey=162, (no support for PEM signatures) ## History In 2006, Peter Pearson announced his pure-python implementation of ECDSA in a [message to sci.crypt][1], available from his [download site][2]. In 2010, Brian Warner wrote a wrapper around this code, to make it a bit easier and safer to use. In 2020, Hubert Kario included an implementation of elliptic curve cryptography that uses Jacobian coordinates internally, improving performance about 20-fold. You are looking at the README for this wrapper. [1]: http://www.derkeiler.com/Newsgroups/sci.crypt/2006-01/msg00651.html [2]: http://webpages.charter.net/curryfans/peter/downloads.html ## Testing To run the full test suite, do this: tox -e coverage On an Intel Core i7 4790K @ 4.0GHz, the tests take about 18 seconds to execute. The test suite uses [`hypothesis`](https://github.com/HypothesisWorks/hypothesis) so there is some inherent variability in the test suite execution time. One part of `test_pyecdsa.py` and `test_ecdh.py` checks compatibility with OpenSSL, by running the "openssl" CLI tool, make sure it's in your `PATH` if you want to test compatibility with it (if OpenSSL is missing, too old, or doesn't support all the curves supported in upstream releases you will see skipped tests in the above `coverage` run). ## Security This library was not designed with security in mind. If you are processing data that needs to be protected we suggest you use a quality wrapper around OpenSSL. [pyca/cryptography](https://cryptography.io) is one example of such a wrapper. The primary use-case of this library is as a portable library for interoperability testing and as a teaching tool. **This library does not protect against side-channel attacks.** Do not allow attackers to measure how long it takes you to generate a key pair or sign a message. Do not allow attackers to run code on the same physical machine when key pair generation or signing is taking place (this includes virtual machines). Do not allow attackers to measure how much power your computer uses while generating the key pair or signing a message. 
Do not allow attackers to measure RF interference coming from your computer while generating a key pair or signing a message. Note: just loading the private key will cause key pair generation. Other operations or attack vectors may also be vulnerable to attacks. **For a sophisticated attacker observing just one operation with a private key will be sufficient to completely reconstruct the private key**. Please also note that any Pure-python cryptographic library will be vulnerable to the same side-channel attacks. This is because Python does not provide side-channel secure primitives (with the exception of [`hmac.compare_digest()`][3]), making side-channel secure programming impossible. This library depends upon a strong source of random numbers. Do not use it on a system where `os.urandom()` does not provide cryptographically secure random numbers. [3]: https://docs.python.org/3/library/hmac.html#hmac.compare_digest ## Usage You start by creating a `SigningKey`. You can use this to sign data, by passing in data as a byte string and getting back the signature (also a byte string). You can also ask a `SigningKey` to give you the corresponding `VerifyingKey`. The `VerifyingKey` can be used to verify a signature, by passing it both the data string and the signature byte string: it either returns True or raises `BadSignatureError`. ```python from ecdsa import SigningKey sk = SigningKey.generate() # uses NIST192p vk = sk.verifying_key signature = sk.sign(b"message") assert vk.verify(signature, b"message") ``` Each `SigningKey`/`VerifyingKey` is associated with a specific curve, like NIST192p (the default one). Longer curves are more secure, but take longer to use, and result in longer keys and signatures. ```python from ecdsa import SigningKey, NIST384p sk = SigningKey.generate(curve=NIST384p) vk = sk.verifying_key signature = sk.sign(b"message") assert vk.verify(signature, b"message") ``` The `SigningKey` can be serialized into several different formats: the shortest is to call `s=sk.to_string()`, and then re-create it with `SigningKey.from_string(s, curve)` . This short form does not record the curve, so you must be sure to pass to `from_string()` the same curve you used for the original key. The short form of a NIST192p-based signing key is just 24 bytes long. If a point encoding is invalid or it does not lie on the specified curve, `from_string()` will raise `MalformedPointError`. ```python from ecdsa import SigningKey, NIST384p sk = SigningKey.generate(curve=NIST384p) sk_string = sk.to_string() sk2 = SigningKey.from_string(sk_string, curve=NIST384p) print(sk_string.hex()) print(sk2.to_string().hex()) ``` Note: while the methods are called `to_string()` the type they return is actually `bytes`, the "string" part is leftover from Python 2. `sk.to_pem()` and `sk.to_der()` will serialize the signing key into the same formats that OpenSSL uses. The PEM file looks like the familiar ASCII-armored `"-----BEGIN EC PRIVATE KEY-----"` base64-encoded format, and the DER format is a shorter binary form of the same data. `SigningKey.from_pem()/.from_der()` will undo this serialization. These formats include the curve name, so you do not need to pass in a curve identifier to the deserializer. In case the file is malformed `from_der()` and `from_pem()` will raise `UnexpectedDER` or` MalformedPointError`. 
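As an illustration, here is a minimal sketch of guarding against malformed key files; the exact import paths of the exception classes (`ecdsa.der.UnexpectedDER` and `ecdsa.errors.MalformedPointError`) are an assumption based on recent releases, so adjust them if your version exports them elsewhere. The next example then shows the normal PEM round trip with well-formed keys:

```python
from ecdsa import SigningKey
from ecdsa.der import UnexpectedDER           # assumed import path
from ecdsa.errors import MalformedPointError  # assumed import path


def load_signing_key(pem_data):
    """Return a SigningKey, or None if the PEM/DER data is malformed."""
    try:
        return SigningKey.from_pem(pem_data)
    except (UnexpectedDER, MalformedPointError):
        return None
```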
```python from ecdsa import SigningKey, NIST384p sk = SigningKey.generate(curve=NIST384p) sk_pem = sk.to_pem() sk2 = SigningKey.from_pem(sk_pem) # sk and sk2 are the same key ``` Likewise, the `VerifyingKey` can be serialized in the same way: `vk.to_string()/VerifyingKey.from_string()`, `to_pem()/from_pem()`, and `to_der()/from_der()`. The same `curve=` argument is needed for `VerifyingKey.from_string()`. ```python from ecdsa import SigningKey, VerifyingKey, NIST384p sk = SigningKey.generate(curve=NIST384p) vk = sk.verifying_key vk_string = vk.to_string() vk2 = VerifyingKey.from_string(vk_string, curve=NIST384p) # vk and vk2 are the same key from ecdsa import SigningKey, VerifyingKey, NIST384p sk = SigningKey.generate(curve=NIST384p) vk = sk.verifying_key vk_pem = vk.to_pem() vk2 = VerifyingKey.from_pem(vk_pem) # vk and vk2 are the same key ``` There are a couple of different ways to compute a signature. Fundamentally, ECDSA takes a number that represents the data being signed, and returns a pair of numbers that represent the signature. The `hashfunc=` argument to `sk.sign()` and `vk.verify()` is used to turn an arbitrary string into a fixed-length digest, which is then turned into a number that ECDSA can sign, and both sign and verify must use the same approach. The default value is `hashlib.sha1`, but if you use NIST256p or a longer curve, you can use `hashlib.sha256` instead. There are also multiple ways to represent a signature. The default `sk.sign()` and `vk.verify()` methods present it as a short string, for simplicity and minimal overhead. To use a different scheme, use the `sk.sign(sigencode=)` and `vk.verify(sigdecode=)` arguments. There are helper functions in the `ecdsa.util` module that can be useful here. It is also possible to create a `SigningKey` from a "seed", which is deterministic. This can be used in protocols where you want to derive consistent signing keys from some other secret, for example when you want three separate keys and only want to store a single master secret. You should start with a uniformly-distributed unguessable seed with about `curve.baselen` bytes of entropy, and then use one of the helper functions in `ecdsa.util` to convert it into an integer in the correct range, and then finally pass it into `SigningKey.from_secret_exponent()`, like this: ```python import os from ecdsa import NIST384p, SigningKey from ecdsa.util import randrange_from_seed__trytryagain def make_key(seed): secexp = randrange_from_seed__trytryagain(seed, NIST384p.order) return SigningKey.from_secret_exponent(secexp, curve=NIST384p) seed = os.urandom(NIST384p.baselen) # or other starting point sk1a = make_key(seed) sk1b = make_key(seed) # note: sk1a and sk1b are the same key assert sk1a.to_string() == sk1b.to_string() sk2 = make_key(b"2-"+seed) # different key assert sk1a.to_string() != sk2.to_string() ``` In case the application will verify a lot of signatures made with a single key, it's possible to precompute some of the internal values to make signature verification significantly faster. The break-even point occurs at about 100 signatures verified. To perform precomputation, you can call the `precompute()` method on `VerifyingKey` instance: ```python from ecdsa import SigningKey, NIST384p sk = SigningKey.generate(curve=NIST384p) vk = sk.verifying_key vk.precompute() signature = sk.sign(b"message") assert vk.verify(signature, b"message") ``` Once `precompute()` was called, all signature verifications with this key will be faster to execute. 
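To make the `hashfunc=` and `sigencode=`/`sigdecode=` arguments described above concrete, here is a minimal sketch; the choice of NIST256p, SHA-256, and DER signature encoding is only an example, not a requirement:

```python
import hashlib

from ecdsa import SigningKey, NIST256p
from ecdsa.util import sigencode_der, sigdecode_der

sk = SigningKey.generate(curve=NIST256p)
vk = sk.verifying_key

# sign with SHA-256 and emit a DER-encoded signature instead of the raw one
signature = sk.sign(b"message", hashfunc=hashlib.sha256, sigencode=sigencode_der)

# verification must use the same hash function and signature decoder
assert vk.verify(signature, b"message", hashfunc=hashlib.sha256, sigdecode=sigdecode_der)
```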
## OpenSSL Compatibility To produce signatures that can be verified by OpenSSL tools, or to verify signatures that were produced by those tools, use: ```python # openssl ecparam -name prime256v1 -genkey -out sk.pem # openssl ec -in sk.pem -pubout -out vk.pem # echo "data for signing" > data # openssl dgst -sha256 -sign sk.pem -out data.sig data # openssl dgst -sha256 -verify vk.pem -signature data.sig data # openssl dgst -sha256 -prverify sk.pem -signature data.sig data import hashlib from ecdsa import SigningKey, VerifyingKey from ecdsa.util import sigencode_der, sigdecode_der with open("vk.pem") as f: vk = VerifyingKey.from_pem(f.read()) with open("data", "rb") as f: data = f.read() with open("data.sig", "rb") as f: signature = f.read() assert vk.verify(signature, data, hashlib.sha256, sigdecode=sigdecode_der) with open("sk.pem") as f: sk = SigningKey.from_pem(f.read(), hashlib.sha256) new_signature = sk.sign_deterministic(data, sigencode=sigencode_der) with open("data.sig2", "wb") as f: f.write(new_signature) # openssl dgst -sha256 -verify vk.pem -signature data.sig2 data ``` Note: if compatibility with OpenSSL 1.0.0 or earlier is necessary, the `sigencode_string` and `sigdecode_string` from `ecdsa.util` can be used for respectively writing and reading the signatures. The keys also can be written in format that openssl can handle: ```python from ecdsa import SigningKey, VerifyingKey with open("sk.pem") as f: sk = SigningKey.from_pem(f.read()) with open("sk.pem", "wb") as f: f.write(sk.to_pem()) with open("vk.pem") as f: vk = VerifyingKey.from_pem(f.read()) with open("vk.pem", "wb") as f: f.write(vk.to_pem()) ``` ## Entropy Creating a signing key with `SigningKey.generate()` requires some form of entropy (as opposed to `from_secret_exponent`/`from_string`/`from_der`/`from_pem`, which are deterministic and do not require an entropy source). The default source is `os.urandom()`, but you can pass any other function that behaves like `os.urandom` as the `entropy=` argument to do something different. This may be useful in unit tests, where you want to achieve repeatable results. The `ecdsa.util.PRNG` utility is handy here: it takes a seed and produces a strong pseudo-random stream from it: ```python from ecdsa.util import PRNG from ecdsa import SigningKey rng1 = PRNG(b"seed") sk1 = SigningKey.generate(entropy=rng1) rng2 = PRNG(b"seed") sk2 = SigningKey.generate(entropy=rng2) # sk1 and sk2 are the same key ``` Likewise, ECDSA signature generation requires a random number, and each signature must use a different one (using the same number twice will immediately reveal the private signing key). The `sk.sign()` method takes an `entropy=` argument which behaves the same as `SigningKey.generate(entropy=)`. ## Deterministic Signatures If you call `SigningKey.sign_deterministic(data)` instead of `.sign(data)`, the code will generate a deterministic signature instead of a random one. This uses the algorithm from RFC6979 to safely generate a unique `k` value, derived from the private key and the message being signed. Each time you sign the same message with the same key, you will get the same signature (using the same `k`). This may become the default in a future version, as it is not vulnerable to failures of the entropy source. 
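For example, a minimal sketch of deterministic signing; the NIST256p curve here is just an illustration, and the `sigencode=` argument used in the OpenSSL compatibility section above works here as well:

```python
from ecdsa import SigningKey, NIST256p

sk = SigningKey.generate(curve=NIST256p)
vk = sk.verifying_key

# repeated calls return the same signature for the same key and message
sig1 = sk.sign_deterministic(b"message")
sig2 = sk.sign_deterministic(b"message")
assert sig1 == sig2
assert vk.verify(sig1, b"message")
```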
## Examples

Create a NIST192p key pair and immediately save both to disk:

```python
from ecdsa import SigningKey

sk = SigningKey.generate()
vk = sk.verifying_key
with open("private.pem", "wb") as f:
    f.write(sk.to_pem())
with open("public.pem", "wb") as f:
    f.write(vk.to_pem())
```

Load a signing key from disk, use it to sign a message (using SHA-1), and
write the signature to disk:

```python
from ecdsa import SigningKey

with open("private.pem") as f:
    sk = SigningKey.from_pem(f.read())
with open("message", "rb") as f:
    message = f.read()
sig = sk.sign(message)
with open("signature", "wb") as f:
    f.write(sig)
```

Load the verifying key, message, and signature from disk, and verify the
signature (assume SHA-1 hash):

```python
from ecdsa import VerifyingKey, BadSignatureError

vk = VerifyingKey.from_pem(open("public.pem").read())
with open("message", "rb") as f:
    message = f.read()
with open("signature", "rb") as f:
    sig = f.read()
try:
    vk.verify(sig, message)
    print("good signature")
except BadSignatureError:
    print("BAD SIGNATURE")
```

Create a NIST521p key pair:

```python
from ecdsa import SigningKey, NIST521p

sk = SigningKey.generate(curve=NIST521p)
vk = sk.verifying_key
```

Create three independent signing keys from a master seed:

```python
from ecdsa import NIST192p, SigningKey
from ecdsa.util import randrange_from_seed__trytryagain

def make_key_from_seed(seed, curve=NIST192p):
    secexp = randrange_from_seed__trytryagain(seed, curve.order)
    return SigningKey.from_secret_exponent(secexp, curve)

# `seed` is a master secret defined elsewhere (see the Entropy section above)
sk1 = make_key_from_seed("1:%s" % seed)
sk2 = make_key_from_seed("2:%s" % seed)
sk3 = make_key_from_seed("3:%s" % seed)
```

Load a verifying key from disk and print it using hex encoding in
uncompressed and compressed format (defined in X9.62 and SEC1 standards):

```python
from ecdsa import VerifyingKey

with open("public.pem") as f:
    vk = VerifyingKey.from_pem(f.read())

print("uncompressed: {0}".format(vk.to_string("uncompressed").hex()))
print("compressed: {0}".format(vk.to_string("compressed").hex()))
```

Load a verifying key from a hex string in compressed format and output it in
uncompressed format:

```python
from ecdsa import VerifyingKey, NIST256p

comp_str = '022799c0d0ee09772fdd337d4f28dc155581951d07082fb19a38aa396b67e77759'
vk = VerifyingKey.from_string(bytearray.fromhex(comp_str), curve=NIST256p)
print(vk.to_string("uncompressed").hex())
```

ECDH key exchange with a remote party:

```python
from ecdsa import ECDH, NIST256p

ecdh = ECDH(curve=NIST256p)
ecdh.generate_private_key()
local_public_key = ecdh.get_public_key()
# send `local_public_key` to the remote party and receive
# `remote_public_key` from the remote party
with open("remote_public_key.pem") as e:
    remote_public_key = e.read()
ecdh.load_received_public_key_pem(remote_public_key)
secret = ecdh.generate_sharedsecret_bytes()
```

Platform: UNKNOWN
Classifier: Programming Language :: Python
Classifier: Programming Language :: Python :: 2
Classifier: Programming Language :: Python :: 2.6
Classifier: Programming Language :: Python :: 2.7
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.3
Classifier: Programming Language :: Python :: 3.4
Classifier: Programming Language :: Python :: 3.5
Classifier: Programming Language :: Python :: 3.6
Classifier: Programming Language :: Python :: 3.7
Classifier: Programming Language :: Python :: 3.8
Classifier: Programming Language :: Python :: 3.9
Classifier: Programming Language :: Python :: 3.10
Classifier: Programming Language :: Python :: 3.11
Requires-Python: >=2.6, !=3.0.*, !=3.1.*, !=3.2.*
Description-Content-Type: text/markdown Provides-Extra: gmpy2 Provides-Extra: gmpy License-File: LICENSE ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1654868667.0 ecdsa-0.18.0/README.md0000664005075200507520000006760314250645273013554 0ustar00hkariohkario# Pure-Python ECDSA and ECDH [![Build Status](https://github.com/tlsfuzzer/python-ecdsa/workflows/GitHub%20CI/badge.svg?branch=master)](https://github.com/tlsfuzzer/python-ecdsa/actions?query=workflow%3A%22GitHub+CI%22+branch%3Amaster) [![Documentation Status](https://readthedocs.org/projects/ecdsa/badge/?version=latest)](https://ecdsa.readthedocs.io/en/latest/?badge=latest) [![Coverage Status](https://coveralls.io/repos/github/tlsfuzzer/python-ecdsa/badge.svg?branch=master)](https://coveralls.io/github/tlsfuzzer/python-ecdsa?branch=master) ![condition coverage](https://img.shields.io/endpoint?url=https://gist.githubusercontent.com/tomato42/9b6ca1f3410207fbeca785a178781651/raw/python-ecdsa-condition-coverage.json) [![Language grade: Python](https://img.shields.io/lgtm/grade/python/g/tlsfuzzer/python-ecdsa.svg?logo=lgtm&logoWidth=18)](https://lgtm.com/projects/g/tlsfuzzer/python-ecdsa/context:python) [![Total alerts](https://img.shields.io/lgtm/alerts/g/tlsfuzzer/python-ecdsa.svg?logo=lgtm&logoWidth=18)](https://lgtm.com/projects/g/tlsfuzzer/python-ecdsa/alerts/) [![Latest Version](https://img.shields.io/pypi/v/ecdsa.svg?style=flat)](https://pypi.python.org/pypi/ecdsa/) ![Code style: black](https://img.shields.io/badge/code%20style-black-000000.svg?style=flat) This is an easy-to-use implementation of ECC (Elliptic Curve Cryptography) with support for ECDSA (Elliptic Curve Digital Signature Algorithm), EdDSA (Edwards-curve Digital Signature Algorithm) and ECDH (Elliptic Curve Diffie-Hellman), implemented purely in Python, released under the MIT license. With this library, you can quickly create key pairs (signing key and verifying key), sign messages, and verify the signatures. You can also agree on a shared secret key based on exchanged public keys. The keys and signatures are very short, making them easy to handle and incorporate into other protocols. **NOTE: This library should not be used in production settings, see [Security](#Security) for more details.** ## Features This library provides key generation, signing, verifying, and shared secret derivation for five popular NIST "Suite B" GF(p) (_prime field_) curves, with key lengths of 192, 224, 256, 384, and 521 bits. The "short names" for these curves, as known by the OpenSSL tool (`openssl ecparam -list_curves`), are: `prime192v1`, `secp224r1`, `prime256v1`, `secp384r1`, and `secp521r1`. It includes the 256-bit curve `secp256k1` used by Bitcoin. There is also support for the regular (non-twisted) variants of Brainpool curves from 160 to 512 bits. The "short names" of those curves are: `brainpoolP160r1`, `brainpoolP192r1`, `brainpoolP224r1`, `brainpoolP256r1`, `brainpoolP320r1`, `brainpoolP384r1`, `brainpoolP512r1`. Few of the small curves from SEC standard are also included (mainly to speed-up testing of the library), those are: `secp112r1`, `secp112r2`, `secp128r1`, and `secp160r1`. Key generation, siging and verifying is also supported for Ed25519 and Ed448 curves. No other curves are included, but it is not too hard to add support for more curves over prime fields. ## Dependencies This library uses only Python and the 'six' package. It is compatible with Python 2.6, 2.7, and 3.3+. 
It also supports execution on alternative implementations like pypy and pypy3. If `gmpy2` or `gmpy` is installed, they will be used for faster arithmetic. Either of them can be installed after this library is installed; `python-ecdsa` will detect their presence on start-up and use them automatically. You should prefer `gmpy2` on Python 3 for optimal performance.

To run the OpenSSL compatibility tests, the 'openssl' tool must be in your `PATH`. This release has been tested successfully against OpenSSL 0.9.8o, 1.0.0a, 1.0.2f, 1.1.1d and 3.0.1 (among others).

## Installation

This library is available on PyPI; it's recommended to install it using `pip`:

```
pip install ecdsa
```

If higher performance is wanted and using native code is not a problem, it's possible to install the library together with `gmpy2`:

```
pip install ecdsa[gmpy2]
```

or (slower, legacy option):

```
pip install ecdsa[gmpy]
```

## Speed

The following table shows how long this library takes to generate key pairs (`keygen`), to sign data (`sign`), to verify those signatures (`verify`), to derive a shared secret (`ecdh`), and to verify the signatures with no key-specific precomputation (`no PC verify`). All those values are in seconds. For convenience, the inverses of those values are also provided: how many keys per second can be generated (`keygen/s`), how many signatures can be made per second (`sign/s`), how many signatures can be verified per second (`verify/s`), how many shared secrets can be derived per second (`ecdh/s`), and how many signatures with no key-specific precomputation can be verified per second (`no PC verify/s`). The size of a raw signature (generally the smallest way a signature can be encoded) is also provided in the `siglen` column. Use `tox -e speed` to generate this table on your own computer.
On an Intel Core i7 4790K @ 4.0GHz I'm getting the following performance: ``` siglen keygen keygen/s sign sign/s verify verify/s no PC verify no PC verify/s NIST192p: 48 0.00032s 3134.06 0.00033s 2985.53 0.00063s 1598.36 0.00129s 774.43 NIST224p: 56 0.00040s 2469.24 0.00042s 2367.88 0.00081s 1233.41 0.00170s 586.66 NIST256p: 64 0.00051s 1952.73 0.00054s 1867.80 0.00098s 1021.86 0.00212s 471.27 NIST384p: 96 0.00107s 935.92 0.00111s 904.23 0.00203s 491.77 0.00446s 224.00 NIST521p: 132 0.00210s 475.52 0.00215s 464.16 0.00398s 251.28 0.00874s 114.39 SECP256k1: 64 0.00052s 1921.54 0.00054s 1847.49 0.00105s 948.68 0.00210s 477.01 BRAINPOOLP160r1: 40 0.00025s 4003.88 0.00026s 3845.12 0.00053s 1893.93 0.00105s 949.92 BRAINPOOLP192r1: 48 0.00033s 3043.97 0.00034s 2975.98 0.00063s 1581.50 0.00135s 742.29 BRAINPOOLP224r1: 56 0.00041s 2436.44 0.00043s 2315.51 0.00078s 1278.49 0.00180s 556.16 BRAINPOOLP256r1: 64 0.00053s 1892.49 0.00054s 1846.24 0.00114s 875.64 0.00229s 437.25 BRAINPOOLP320r1: 80 0.00073s 1361.26 0.00076s 1309.25 0.00143s 699.29 0.00322s 310.49 BRAINPOOLP384r1: 96 0.00107s 931.29 0.00111s 901.80 0.00230s 434.19 0.00476s 210.20 BRAINPOOLP512r1: 128 0.00207s 483.41 0.00212s 471.42 0.00425s 235.43 0.00912s 109.61 SECP112r1: 28 0.00015s 6672.53 0.00016s 6440.34 0.00031s 3265.41 0.00056s 1774.20 SECP112r2: 28 0.00015s 6697.11 0.00015s 6479.98 0.00028s 3524.72 0.00058s 1716.16 SECP128r1: 32 0.00018s 5497.65 0.00019s 5272.89 0.00036s 2747.39 0.00072s 1396.16 SECP160r1: 42 0.00025s 3949.32 0.00026s 3894.45 0.00046s 2153.85 0.00102s 985.07 Ed25519: 64 0.00076s 1324.48 0.00042s 2405.01 0.00109s 918.05 0.00344s 290.50 Ed448: 114 0.00176s 569.53 0.00115s 870.94 0.00282s 355.04 0.01024s 97.69 ecdh ecdh/s NIST192p: 0.00104s 964.89 NIST224p: 0.00134s 748.63 NIST256p: 0.00170s 587.08 NIST384p: 0.00352s 283.90 NIST521p: 0.00717s 139.51 SECP256k1: 0.00154s 648.40 BRAINPOOLP160r1: 0.00082s 1220.70 BRAINPOOLP192r1: 0.00105s 956.75 BRAINPOOLP224r1: 0.00136s 734.52 BRAINPOOLP256r1: 0.00178s 563.32 BRAINPOOLP320r1: 0.00252s 397.23 BRAINPOOLP384r1: 0.00376s 266.27 BRAINPOOLP512r1: 0.00733s 136.35 SECP112r1: 0.00046s 2180.40 SECP112r2: 0.00045s 2229.14 SECP128r1: 0.00054s 1868.15 SECP160r1: 0.00080s 1243.98 ``` To test performance with `gmpy2` loaded, use `tox -e speedgmpy2`. 
On the same machine I'm getting the following performance with `gmpy2`: ``` siglen keygen keygen/s sign sign/s verify verify/s no PC verify no PC verify/s NIST192p: 48 0.00017s 5933.40 0.00017s 5751.70 0.00032s 3125.28 0.00067s 1502.41 NIST224p: 56 0.00021s 4782.87 0.00022s 4610.05 0.00040s 2487.04 0.00089s 1126.90 NIST256p: 64 0.00023s 4263.98 0.00024s 4125.16 0.00045s 2200.88 0.00098s 1016.82 NIST384p: 96 0.00041s 2449.54 0.00042s 2399.96 0.00083s 1210.57 0.00172s 581.43 NIST521p: 132 0.00071s 1416.07 0.00072s 1389.81 0.00144s 692.93 0.00312s 320.40 SECP256k1: 64 0.00024s 4245.05 0.00024s 4122.09 0.00045s 2206.40 0.00094s 1068.32 BRAINPOOLP160r1: 40 0.00014s 6939.17 0.00015s 6681.55 0.00029s 3452.43 0.00057s 1769.81 BRAINPOOLP192r1: 48 0.00017s 5920.05 0.00017s 5774.36 0.00034s 2979.00 0.00069s 1453.19 BRAINPOOLP224r1: 56 0.00021s 4732.12 0.00022s 4622.65 0.00041s 2422.47 0.00087s 1149.87 BRAINPOOLP256r1: 64 0.00024s 4233.02 0.00024s 4115.20 0.00047s 2143.27 0.00098s 1015.60 BRAINPOOLP320r1: 80 0.00032s 3162.38 0.00032s 3077.62 0.00063s 1598.83 0.00136s 737.34 BRAINPOOLP384r1: 96 0.00041s 2436.88 0.00042s 2395.62 0.00083s 1202.68 0.00178s 562.85 BRAINPOOLP512r1: 128 0.00063s 1587.60 0.00064s 1558.83 0.00125s 799.96 0.00281s 355.83 SECP112r1: 28 0.00009s 11118.66 0.00009s 10775.48 0.00018s 5456.00 0.00033s 3020.83 SECP112r2: 28 0.00009s 11322.97 0.00009s 10857.71 0.00017s 5748.77 0.00032s 3094.28 SECP128r1: 32 0.00010s 10078.39 0.00010s 9665.27 0.00019s 5200.58 0.00036s 2760.88 SECP160r1: 42 0.00015s 6875.51 0.00015s 6647.35 0.00029s 3422.41 0.00057s 1768.35 Ed25519: 64 0.00030s 3322.56 0.00018s 5568.63 0.00046s 2165.35 0.00153s 654.02 Ed448: 114 0.00060s 1680.53 0.00039s 2567.40 0.00096s 1036.67 0.00350s 285.62 ecdh ecdh/s NIST192p: 0.00050s 1985.70 NIST224p: 0.00066s 1524.16 NIST256p: 0.00071s 1413.07 NIST384p: 0.00127s 788.89 NIST521p: 0.00230s 434.85 SECP256k1: 0.00071s 1409.95 BRAINPOOLP160r1: 0.00042s 2374.65 BRAINPOOLP192r1: 0.00051s 1960.01 BRAINPOOLP224r1: 0.00066s 1518.37 BRAINPOOLP256r1: 0.00071s 1399.90 BRAINPOOLP320r1: 0.00100s 997.21 BRAINPOOLP384r1: 0.00129s 777.51 BRAINPOOLP512r1: 0.00210s 475.99 SECP112r1: 0.00022s 4457.70 SECP112r2: 0.00024s 4252.33 SECP128r1: 0.00028s 3589.31 SECP160r1: 0.00043s 2305.02 ``` (there's also `gmpy` version, execute it using `tox -e speedgmpy`) For comparison, a highly optimised implementation (including curve-specific assembly for some curves), like the one in OpenSSL 1.1.1d, provides the following performance numbers on the same machine. 
Run `openssl speed ecdsa` and `openssl speed ecdh` to reproduce it: ``` sign verify sign/s verify/s 192 bits ecdsa (nistp192) 0.0002s 0.0002s 4785.6 5380.7 224 bits ecdsa (nistp224) 0.0000s 0.0001s 22475.6 9822.0 256 bits ecdsa (nistp256) 0.0000s 0.0001s 45069.6 14166.6 384 bits ecdsa (nistp384) 0.0008s 0.0006s 1265.6 1648.1 521 bits ecdsa (nistp521) 0.0003s 0.0005s 3753.1 1819.5 256 bits ecdsa (brainpoolP256r1) 0.0003s 0.0003s 2983.5 3333.2 384 bits ecdsa (brainpoolP384r1) 0.0008s 0.0007s 1258.8 1528.1 512 bits ecdsa (brainpoolP512r1) 0.0015s 0.0012s 675.1 860.1 sign verify sign/s verify/s 253 bits EdDSA (Ed25519) 0.0000s 0.0001s 28217.9 10897.7 456 bits EdDSA (Ed448) 0.0003s 0.0005s 3926.5 2147.7 op op/s 192 bits ecdh (nistp192) 0.0002s 4853.4 224 bits ecdh (nistp224) 0.0001s 15252.1 256 bits ecdh (nistp256) 0.0001s 18436.3 384 bits ecdh (nistp384) 0.0008s 1292.7 521 bits ecdh (nistp521) 0.0003s 2884.7 256 bits ecdh (brainpoolP256r1) 0.0003s 3066.5 384 bits ecdh (brainpoolP384r1) 0.0008s 1298.0 512 bits ecdh (brainpoolP512r1) 0.0014s 694.8 ``` Keys and signature can be serialized in different ways (see Usage, below). For a NIST192p key, the three basic representations require strings of the following lengths (in bytes): to_string: signkey= 24, verifykey= 48, signature=48 compressed: signkey=n/a, verifykey= 25, signature=n/a DER: signkey=106, verifykey= 80, signature=55 PEM: signkey=278, verifykey=162, (no support for PEM signatures) ## History In 2006, Peter Pearson announced his pure-python implementation of ECDSA in a [message to sci.crypt][1], available from his [download site][2]. In 2010, Brian Warner wrote a wrapper around this code, to make it a bit easier and safer to use. In 2020, Hubert Kario included an implementation of elliptic curve cryptography that uses Jacobian coordinates internally, improving performance about 20-fold. You are looking at the README for this wrapper. [1]: http://www.derkeiler.com/Newsgroups/sci.crypt/2006-01/msg00651.html [2]: http://webpages.charter.net/curryfans/peter/downloads.html ## Testing To run the full test suite, do this: tox -e coverage On an Intel Core i7 4790K @ 4.0GHz, the tests take about 18 seconds to execute. The test suite uses [`hypothesis`](https://github.com/HypothesisWorks/hypothesis) so there is some inherent variability in the test suite execution time. One part of `test_pyecdsa.py` and `test_ecdh.py` checks compatibility with OpenSSL, by running the "openssl" CLI tool, make sure it's in your `PATH` if you want to test compatibility with it (if OpenSSL is missing, too old, or doesn't support all the curves supported in upstream releases you will see skipped tests in the above `coverage` run). ## Security This library was not designed with security in mind. If you are processing data that needs to be protected we suggest you use a quality wrapper around OpenSSL. [pyca/cryptography](https://cryptography.io) is one example of such a wrapper. The primary use-case of this library is as a portable library for interoperability testing and as a teaching tool. **This library does not protect against side-channel attacks.** Do not allow attackers to measure how long it takes you to generate a key pair or sign a message. Do not allow attackers to run code on the same physical machine when key pair generation or signing is taking place (this includes virtual machines). Do not allow attackers to measure how much power your computer uses while generating the key pair or signing a message. 
Do not allow attackers to measure RF interference coming from your computer while generating a key pair or signing a message. Note: just loading the private key will cause key pair generation. Other operations or attack vectors may also be vulnerable to attacks. **For a sophisticated attacker observing just one operation with a private key will be sufficient to completely reconstruct the private key**. Please also note that any Pure-python cryptographic library will be vulnerable to the same side-channel attacks. This is because Python does not provide side-channel secure primitives (with the exception of [`hmac.compare_digest()`][3]), making side-channel secure programming impossible. This library depends upon a strong source of random numbers. Do not use it on a system where `os.urandom()` does not provide cryptographically secure random numbers. [3]: https://docs.python.org/3/library/hmac.html#hmac.compare_digest ## Usage You start by creating a `SigningKey`. You can use this to sign data, by passing in data as a byte string and getting back the signature (also a byte string). You can also ask a `SigningKey` to give you the corresponding `VerifyingKey`. The `VerifyingKey` can be used to verify a signature, by passing it both the data string and the signature byte string: it either returns True or raises `BadSignatureError`. ```python from ecdsa import SigningKey sk = SigningKey.generate() # uses NIST192p vk = sk.verifying_key signature = sk.sign(b"message") assert vk.verify(signature, b"message") ``` Each `SigningKey`/`VerifyingKey` is associated with a specific curve, like NIST192p (the default one). Longer curves are more secure, but take longer to use, and result in longer keys and signatures. ```python from ecdsa import SigningKey, NIST384p sk = SigningKey.generate(curve=NIST384p) vk = sk.verifying_key signature = sk.sign(b"message") assert vk.verify(signature, b"message") ``` The `SigningKey` can be serialized into several different formats: the shortest is to call `s=sk.to_string()`, and then re-create it with `SigningKey.from_string(s, curve)` . This short form does not record the curve, so you must be sure to pass to `from_string()` the same curve you used for the original key. The short form of a NIST192p-based signing key is just 24 bytes long. If a point encoding is invalid or it does not lie on the specified curve, `from_string()` will raise `MalformedPointError`. ```python from ecdsa import SigningKey, NIST384p sk = SigningKey.generate(curve=NIST384p) sk_string = sk.to_string() sk2 = SigningKey.from_string(sk_string, curve=NIST384p) print(sk_string.hex()) print(sk2.to_string().hex()) ``` Note: while the methods are called `to_string()` the type they return is actually `bytes`, the "string" part is leftover from Python 2. `sk.to_pem()` and `sk.to_der()` will serialize the signing key into the same formats that OpenSSL uses. The PEM file looks like the familiar ASCII-armored `"-----BEGIN EC PRIVATE KEY-----"` base64-encoded format, and the DER format is a shorter binary form of the same data. `SigningKey.from_pem()/.from_der()` will undo this serialization. These formats include the curve name, so you do not need to pass in a curve identifier to the deserializer. In case the file is malformed `from_der()` and `from_pem()` will raise `UnexpectedDER` or` MalformedPointError`. 
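When reading keys from untrusted or possibly corrupted files, those exceptions can be caught explicitly. A minimal sketch (the truncated PEM blob below is made up purely for illustration; both exception classes are importable from the top-level `ecdsa` package):

```python
from ecdsa import SigningKey, MalformedPointError, UnexpectedDER

# a deliberately truncated PEM blob, used only to demonstrate error handling
bad_pem = "-----BEGIN EC PRIVATE KEY-----\nAAAA\n-----END EC PRIVATE KEY-----\n"

try:
    sk = SigningKey.from_pem(bad_pem)
except (UnexpectedDER, MalformedPointError) as error:
    print("rejected malformed key: {0}".format(error))
```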
```python from ecdsa import SigningKey, NIST384p sk = SigningKey.generate(curve=NIST384p) sk_pem = sk.to_pem() sk2 = SigningKey.from_pem(sk_pem) # sk and sk2 are the same key ``` Likewise, the `VerifyingKey` can be serialized in the same way: `vk.to_string()/VerifyingKey.from_string()`, `to_pem()/from_pem()`, and `to_der()/from_der()`. The same `curve=` argument is needed for `VerifyingKey.from_string()`. ```python from ecdsa import SigningKey, VerifyingKey, NIST384p sk = SigningKey.generate(curve=NIST384p) vk = sk.verifying_key vk_string = vk.to_string() vk2 = VerifyingKey.from_string(vk_string, curve=NIST384p) # vk and vk2 are the same key from ecdsa import SigningKey, VerifyingKey, NIST384p sk = SigningKey.generate(curve=NIST384p) vk = sk.verifying_key vk_pem = vk.to_pem() vk2 = VerifyingKey.from_pem(vk_pem) # vk and vk2 are the same key ``` There are a couple of different ways to compute a signature. Fundamentally, ECDSA takes a number that represents the data being signed, and returns a pair of numbers that represent the signature. The `hashfunc=` argument to `sk.sign()` and `vk.verify()` is used to turn an arbitrary string into a fixed-length digest, which is then turned into a number that ECDSA can sign, and both sign and verify must use the same approach. The default value is `hashlib.sha1`, but if you use NIST256p or a longer curve, you can use `hashlib.sha256` instead. There are also multiple ways to represent a signature. The default `sk.sign()` and `vk.verify()` methods present it as a short string, for simplicity and minimal overhead. To use a different scheme, use the `sk.sign(sigencode=)` and `vk.verify(sigdecode=)` arguments. There are helper functions in the `ecdsa.util` module that can be useful here. It is also possible to create a `SigningKey` from a "seed", which is deterministic. This can be used in protocols where you want to derive consistent signing keys from some other secret, for example when you want three separate keys and only want to store a single master secret. You should start with a uniformly-distributed unguessable seed with about `curve.baselen` bytes of entropy, and then use one of the helper functions in `ecdsa.util` to convert it into an integer in the correct range, and then finally pass it into `SigningKey.from_secret_exponent()`, like this: ```python import os from ecdsa import NIST384p, SigningKey from ecdsa.util import randrange_from_seed__trytryagain def make_key(seed): secexp = randrange_from_seed__trytryagain(seed, NIST384p.order) return SigningKey.from_secret_exponent(secexp, curve=NIST384p) seed = os.urandom(NIST384p.baselen) # or other starting point sk1a = make_key(seed) sk1b = make_key(seed) # note: sk1a and sk1b are the same key assert sk1a.to_string() == sk1b.to_string() sk2 = make_key(b"2-"+seed) # different key assert sk1a.to_string() != sk2.to_string() ``` In case the application will verify a lot of signatures made with a single key, it's possible to precompute some of the internal values to make signature verification significantly faster. The break-even point occurs at about 100 signatures verified. To perform precomputation, you can call the `precompute()` method on `VerifyingKey` instance: ```python from ecdsa import SigningKey, NIST384p sk = SigningKey.generate(curve=NIST384p) vk = sk.verifying_key vk.precompute() signature = sk.sign(b"message") assert vk.verify(signature, b"message") ``` Once `precompute()` was called, all signature verifications with this key will be faster to execute. 
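Putting the earlier `hashfunc=` and `sigencode=`/`sigdecode=` options together, here is a minimal sketch; SHA-256 and the DER helpers from `ecdsa.util` are just one possible combination:

```python
from hashlib import sha256

from ecdsa import SigningKey, NIST256p
from ecdsa.util import sigencode_der, sigdecode_der

sk = SigningKey.generate(curve=NIST256p)
vk = sk.verifying_key

# signer and verifier must agree on both the hash function and the encoding
signature = sk.sign(b"message", hashfunc=sha256, sigencode=sigencode_der)
assert vk.verify(signature, b"message", hashfunc=sha256, sigdecode=sigdecode_der)
```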
## OpenSSL Compatibility To produce signatures that can be verified by OpenSSL tools, or to verify signatures that were produced by those tools, use: ```python # openssl ecparam -name prime256v1 -genkey -out sk.pem # openssl ec -in sk.pem -pubout -out vk.pem # echo "data for signing" > data # openssl dgst -sha256 -sign sk.pem -out data.sig data # openssl dgst -sha256 -verify vk.pem -signature data.sig data # openssl dgst -sha256 -prverify sk.pem -signature data.sig data import hashlib from ecdsa import SigningKey, VerifyingKey from ecdsa.util import sigencode_der, sigdecode_der with open("vk.pem") as f: vk = VerifyingKey.from_pem(f.read()) with open("data", "rb") as f: data = f.read() with open("data.sig", "rb") as f: signature = f.read() assert vk.verify(signature, data, hashlib.sha256, sigdecode=sigdecode_der) with open("sk.pem") as f: sk = SigningKey.from_pem(f.read(), hashlib.sha256) new_signature = sk.sign_deterministic(data, sigencode=sigencode_der) with open("data.sig2", "wb") as f: f.write(new_signature) # openssl dgst -sha256 -verify vk.pem -signature data.sig2 data ``` Note: if compatibility with OpenSSL 1.0.0 or earlier is necessary, the `sigencode_string` and `sigdecode_string` from `ecdsa.util` can be used for respectively writing and reading the signatures. The keys also can be written in format that openssl can handle: ```python from ecdsa import SigningKey, VerifyingKey with open("sk.pem") as f: sk = SigningKey.from_pem(f.read()) with open("sk.pem", "wb") as f: f.write(sk.to_pem()) with open("vk.pem") as f: vk = VerifyingKey.from_pem(f.read()) with open("vk.pem", "wb") as f: f.write(vk.to_pem()) ``` ## Entropy Creating a signing key with `SigningKey.generate()` requires some form of entropy (as opposed to `from_secret_exponent`/`from_string`/`from_der`/`from_pem`, which are deterministic and do not require an entropy source). The default source is `os.urandom()`, but you can pass any other function that behaves like `os.urandom` as the `entropy=` argument to do something different. This may be useful in unit tests, where you want to achieve repeatable results. The `ecdsa.util.PRNG` utility is handy here: it takes a seed and produces a strong pseudo-random stream from it: ```python from ecdsa.util import PRNG from ecdsa import SigningKey rng1 = PRNG(b"seed") sk1 = SigningKey.generate(entropy=rng1) rng2 = PRNG(b"seed") sk2 = SigningKey.generate(entropy=rng2) # sk1 and sk2 are the same key ``` Likewise, ECDSA signature generation requires a random number, and each signature must use a different one (using the same number twice will immediately reveal the private signing key). The `sk.sign()` method takes an `entropy=` argument which behaves the same as `SigningKey.generate(entropy=)`. ## Deterministic Signatures If you call `SigningKey.sign_deterministic(data)` instead of `.sign(data)`, the code will generate a deterministic signature instead of a random one. This uses the algorithm from RFC6979 to safely generate a unique `k` value, derived from the private key and the message being signed. Each time you sign the same message with the same key, you will get the same signature (using the same `k`). This may become the default in a future version, as it is not vulnerable to failures of the entropy source. 
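A minimal sketch of deterministic signing (SHA-256 is chosen here only for illustration):

```python
from hashlib import sha256

from ecdsa import SigningKey, NIST256p

sk = SigningKey.generate(curve=NIST256p)

sig1 = sk.sign_deterministic(b"message", hashfunc=sha256)
sig2 = sk.sign_deterministic(b"message", hashfunc=sha256)

# the same key and the same message always produce the same signature
assert sig1 == sig2
```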
## Examples

Create a NIST192p key pair and immediately save both keys to disk:

```python
from ecdsa import SigningKey

sk = SigningKey.generate()
vk = sk.verifying_key
with open("private.pem", "wb") as f:
    f.write(sk.to_pem())
with open("public.pem", "wb") as f:
    f.write(vk.to_pem())
```

Load a signing key from disk, use it to sign a message (using SHA-1), and write the signature to disk:

```python
from ecdsa import SigningKey

with open("private.pem") as f:
    sk = SigningKey.from_pem(f.read())
with open("message", "rb") as f:
    message = f.read()
sig = sk.sign(message)
with open("signature", "wb") as f:
    f.write(sig)
```

Load the verifying key, message, and signature from disk, and verify the signature (assume SHA-1 hash):

```python
from ecdsa import VerifyingKey, BadSignatureError

with open("public.pem") as f:
    vk = VerifyingKey.from_pem(f.read())
with open("message", "rb") as f:
    message = f.read()
with open("signature", "rb") as f:
    sig = f.read()
try:
    vk.verify(sig, message)
    print("good signature")
except BadSignatureError:
    print("BAD SIGNATURE")
```

Create a NIST521p key pair:

```python
from ecdsa import SigningKey, NIST521p

sk = SigningKey.generate(curve=NIST521p)
vk = sk.verifying_key
```

Create three independent signing keys from a master seed:

```python
import os

from ecdsa import NIST192p, SigningKey
from ecdsa.util import randrange_from_seed__trytryagain

def make_key_from_seed(seed, curve=NIST192p):
    secexp = randrange_from_seed__trytryagain(seed, curve.order)
    return SigningKey.from_secret_exponent(secexp, curve)

seed = os.urandom(NIST192p.baselen)  # a single master secret, keep it private
sk1 = make_key_from_seed(b"1:" + seed)
sk2 = make_key_from_seed(b"2:" + seed)
sk3 = make_key_from_seed(b"3:" + seed)
```

Load a verifying key from disk and print it using hex encoding in uncompressed and compressed format (defined in X9.62 and SEC1 standards):

```python
from ecdsa import VerifyingKey

with open("public.pem") as f:
    vk = VerifyingKey.from_pem(f.read())

print("uncompressed: {0}".format(vk.to_string("uncompressed").hex()))
print("compressed: {0}".format(vk.to_string("compressed").hex()))
```

Load a verifying key from a hex string in compressed format and print it in uncompressed format:

```python
from ecdsa import VerifyingKey, NIST256p

comp_str = '022799c0d0ee09772fdd337d4f28dc155581951d07082fb19a38aa396b67e77759'
vk = VerifyingKey.from_string(bytearray.fromhex(comp_str), curve=NIST256p)
print(vk.to_string("uncompressed").hex())
```

ECDH key exchange with a remote party:

```python
from ecdsa import ECDH, NIST256p

ecdh = ECDH(curve=NIST256p)
ecdh.generate_private_key()
local_public_key = ecdh.get_public_key()
# send `local_public_key` to the remote party and receive
# `remote_public_key` from them
with open("remote_public_key.pem") as f:
    remote_public_key = f.read()
ecdh.load_received_public_key_pem(remote_public_key)
secret = ecdh.generate_sharedsecret_bytes()
```
././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1611347113.0 ecdsa-0.18.0/build-requirements-2.6.txt0000664005075200507520000000025214002632251017131 0ustar00hkariohkariotox inflect<0.3.1 pyopenssl<18 cffi<1.14 git+https://github.com/tomato42/coveralls-python.git@add-py26#egg=coveralls idna<2.8 unittest2 hypothesis<3 coverage mock==2.0.0 ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1611347113.0 ecdsa-0.18.0/build-requirements-2.7.txt0000664005075200507520000000016214002632251017130 0ustar00hkariohkariotox git+https://github.com/tomato42/coveralls-python.git@add-py26#egg=coveralls hypothesis pytest>=4.6.0 coverage ././@PaxHeader0000000000000000000000000000002600000000000010213
xustar0022 mtime=1611347113.0 ecdsa-0.18.0/build-requirements-3.3.txt0000664005075200507520000000025714002632251017132 0ustar00hkariohkariogit+https://github.com/tomato42/coveralls-python.git@add-py26#egg=coveralls pluggy<0.6 tox<3 wheel<0.30 virtualenv==15.2.0 enum34 hypothesis<3.44 coverage<5.0 urllib3<=1.25.8 ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1622120382.0 ecdsa-0.18.0/build-requirements-3.4.txt0000664005075200507520000000020614053713676017146 0ustar00hkariohkariotox git+https://github.com/tomato42/coveralls-python.git@add-py26#egg=coveralls hypothesis pytest>=4.6.0 PyYAML<5.3 coverage attrs<21 ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1611347113.0 ecdsa-0.18.0/build-requirements.txt0000664005075200507520000000006014002632251016621 0ustar00hkariohkariotox coveralls hypothesis pytest>=4.6.0 coverage ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1654868667.0 ecdsa-0.18.0/diff-instrumental.py0000664005075200507520000000342214250645273016267 0ustar00hkariohkariofrom __future__ import print_function import sys import getopt fail_under = None max_difference = 0 read_location = None save_location = None raw = False argv = sys.argv[1:] opts, args = getopt.getopt( argv, "s:r:", ["fail-under=", "max-difference=", "save=", "read=", "raw"] ) if args: raise ValueError("Unexpected parameters: {0}".format(args)) for opt, arg in opts: if opt == "-s" or opt == "--save": save_location = arg elif opt == "-r" or opt == "--read": read_location = arg elif opt == "--fail-under": fail_under = float(arg) / 100.0 elif opt == "--max-difference": max_difference = float(arg) / 100.0 elif opt == "--raw": raw = True else: raise ValueError("Unknown option: {0}".format(opt)) total_hits = 0 total_count = 0 for line in sys.stdin.readlines(): if not line.startswith("ecdsa"): continue fields = line.split() hit, count = fields[1].split("/") total_hits += int(hit) total_count += int(count) coverage = total_hits * 1.0 / total_count if read_location: with open(read_location, "r") as f: old_coverage = float(f.read()) print("Old coverage: {0:6.2f}%".format(old_coverage * 100)) if save_location: with open(save_location, "w") as f: f.write("{0:1.40f}".format(coverage)) if raw: print("{0:6.2f}".format(coverage * 100)) else: print("Coverage: {0:6.2f}%".format(coverage * 100)) if read_location: print("Difference: {0:6.2f}%".format((old_coverage - coverage) * 100)) if fail_under and coverage < fail_under: print("ERROR: Insufficient coverage.", file=sys.stderr) sys.exit(1) if read_location and coverage - old_coverage < max_difference: print("ERROR: Too big decrease in coverage", file=sys.stderr) sys.exit(1) ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1657371717.7144885 ecdsa-0.18.0/docs/0000775005075200507520000000000014262276106013207 5ustar00hkariohkario././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1649084541.0 ecdsa-0.18.0/docs/Makefile0000664005075200507520000000117614222604175014651 0ustar00hkariohkario# Minimal makefile for Sphinx documentation # # You can set these variables from the command line, and also # from the environment for the first two. SPHINXOPTS ?= SPHINXBUILD ?= sphinx-build SOURCEDIR = source BUILDDIR = build # Put it first so that "make" without argument is like "make help". 
help: @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) .PHONY: help Makefile # Catch-all target: route all unknown targets to Sphinx using the new # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). %: Makefile @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1649084541.0 ecdsa-0.18.0/docs/make.bat0000664005075200507520000000143714222604175014616 0ustar00hkariohkario@ECHO OFF pushd %~dp0 REM Command file for Sphinx documentation if "%SPHINXBUILD%" == "" ( set SPHINXBUILD=sphinx-build ) set SOURCEDIR=source set BUILDDIR=build if "%1" == "" goto help %SPHINXBUILD% >NUL 2>NUL if errorlevel 9009 ( echo. echo.The 'sphinx-build' command was not found. Make sure you have Sphinx echo.installed, then set the SPHINXBUILD environment variable to point echo.to the full path of the 'sphinx-build' executable. Alternatively you echo.may add the Sphinx directory to PATH. echo. echo.If you don't have Sphinx installed, grab it from echo.http://sphinx-doc.org/ exit /b 1 ) %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% goto end :help %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% :end popd ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1649087543.0 ecdsa-0.18.0/docs/requirements.txt0000664005075200507520000000000414222612067016461 0ustar00hkariohkariosix ././@PaxHeader0000000000000000000000000000003300000000000010211 xustar0027 mtime=1657371717.722488 ecdsa-0.18.0/docs/source/0000775005075200507520000000000014262276106014507 5ustar00hkariohkario././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1657371671.0 ecdsa-0.18.0/docs/source/basics.rst0000664005075200507520000002020414262276027016505 0ustar00hkariohkario====================== Basics of ECC handling ====================== The :term:`ECC`, as any asymmetric cryptography system, deals with private keys and public keys. Private keys are generally used to create signatures, and are kept, as the name suggest, private. That's because possession of a private key allows creating a signature that can be verified with a public key. If the public key is associated with an identity (like a person or an institution), possession of the private key will allow to impersonate that identity. The public keys on the other hand are widely distributed, and they don't have to be kept private. The primary purpose of them, is to allow checking if a given signature was made with the associated private key. Number representations ====================== On a more low level, the private key is a single number, usually the size of the curve size: a NIST P-256 private key will have a size of 256 bits, though as it needs to be selected randomly, it may be a slightly smaller number (255-bit, 248-bit, etc.). Public points are a pair of numbers. That pair specifies a point on an elliptic curve (a pair of integers that satisfy the curve equation). Those two numbers are similarly close in size to the curve size, so both the ``x`` and ``y`` coordinate of a NIST P-256 curve will also be around 256 bit in size. .. note:: To be more precise, the size of the private key is related to the curve *order*, i.e. the number of points on a curve. The coordinates of the curve depend on the *field* of the curve, which usually means the size of the *prime* used for operations on points. 
While the *order* and the *prime* size are related and fairly close in size, it's possible to have a curve where either of them is larger by a bit (i.e. it's possible to have a curve that uses a 256 bit *prime* that has a 257 bit *order*).

Since computers normally work with much smaller numbers, like 32 bit or 64 bit, we need to use special approaches to represent numbers that are hundreds of bits large.

The first decision is whether the numbers should be stored in big endian or in little endian format. In big endian, the most significant bits are stored first, so a number like :math:`2^{16}` is saved as three bytes: a byte with value 1 followed by two bytes with value 0. In little endian format the least significant bits are stored first, so a number like :math:`2^{16}` would be stored as three bytes: first two bytes with value 0, then a byte with value 1. For :term:`ECDSA` big endian encoding is usually used, for :term:`EdDSA` little endian encoding is usually used.

Secondly, we need to decide if the numbers should be stored as fixed length strings (zero padded if necessary), or with the minimal number of bytes necessary. That depends on the format and the place it's used; some require strict sizes (so even if the number encoded is 1, but the curve used is 128 bit large, that number 1 still needs to be encoded with 16 bytes, with the fifteen most significant bytes equal to zero).

Public key encoding
===================

Generally, public keys (i.e. points) are expressed as fixed size byte strings. While public keys can be saved as two integers, one to represent the ``x`` coordinate and one to represent the ``y`` coordinate, that actually provides a lot of redundancy. Because of the specifics of elliptic curves, for every valid ``x`` value there are only two valid ``y`` values. Moreover, if you have an ``x`` value, you can compute those two possible ``y`` values (if they exist). As such, it's possible to save just the ``x`` coordinate and the sign of the ``y`` coordinate (as the two possible values are negatives of each other: :math:`y_1 == -y_2`). That gives us a few options to represent the public point; the most common are:

1. As a concatenation of two fixed-length big-endian integers, so called :term:`raw encoding`.

2. As a concatenation of two fixed-length big-endian integers prefixed with the type of the encoding, so called :term:`uncompressed` point representation (the type is represented by a 0x04 byte).

3. As a fixed-length big-endian integer representing the ``x`` coordinate prefixed with the byte representing the combined type of the encoding and the sign of the ``y`` coordinate, so called :term:`compressed` point representation (the type is then represented by a 0x02 or a 0x03 byte).

Interoperable file formats
==========================

Now, while we can save the byte strings as-is and "remember" which curve was used to generate those private and public keys, interoperability usually requires also saving information about the curve together with the corresponding key. Here too there are many ways to do it: save the parameters of the used curve explicitly, use the name of the well-known curve as a string, use a numerical identifier of the well-known curve, etc.

For public keys the most interoperable format is the one described in RFC5912 (look for the SubjectPublicKeyInfo structure). For private keys, the RFC5915 format (also known as the ssleay format) and the PKCS#8 format (described in RFC5958) are the most popular.
All three formats effectively support two ways of providing the information about the curve used: by specifying the curve parameters explicitly or by specifying the curve using ASN.1 OBJECT IDENTIFIER (OID), which is called ``named_curve``. ASN.1 OIDs are a hierarchical system of representing types of objects, for example, NIST P-256 curve is identified by the 1.2.840.10045.3.1.7 OID (in dotted-decimal formatting of the OID, also known by the ``prime256v1`` OID node name or short name). Those OIDs uniquely, identify a particular curve, but the receiver needs to know which numerical OID maps to which curve parameters. Thus the prospect of using the explicit encoding, where all the needed parameters are provided is tempting, the downside is that curve parameters may specify a *weak* curve, which is easy to attack and break (that is to deduce the private key from the public key). To verify curve parameters is complex and computationally expensive, thus generally protocols use few specific curves and require all implementations to carry the parameters of them. As such, use of ``named_curve`` parameters is generally recommended. All of the mentioned formats specify a binary encoding, called DER. That encoding uses bytes with all possible numerical values, which means it's not possible to embed it directly in text files. For uses where it's useful to limit bytes to printable characters, so that the keys can be embedded in text files or text-only protocols (like email), the PEM formatting of the DER-encoded data can be used. The PEM formatting is just a base64 encoding with appropriate header and footer. Signature formats ================= Finally, ECDSA signatures at the lowest level are a pair of numbers, usually called ``r`` and ``s``. While they are the ``x`` coordinates of special points on the curve, they are saved modulo *order* of the curve, not modulo *prime* of the curve (as a coordinate needs to be). That again means we have multiple ways of encoding those two numbers. The two most popular formats are to save them as a concatenation of big-endian integers of fixed size (determined by the curve *order*) or as a DER structure with two INTEGERS. The first of those is called the :term:``raw encoding`` inside the Python ecdsa library. As ASN.1 signature format requires the encoding of INTEGERS, and DER INTEGERs must use the fewest possible number of bytes, a numerically small value of ``r`` or ``s`` will require fewer bytes to represent in the DER structure. Thus, DER encoding isn't fixed size for a given curve, but has a maximum possible size. .. note:: As DER INTEGER uses so-called two's complement representation of numbers, the most significant bit of the most significant byte represents the *sign* of the number. If that bit is set, then the number is considered to be negative. Thus, to represent a number like 255, which in binary representation is 0b11111111 (i.e. a byte with all bits set high), the DER encoding of it will require two bytes, one zero byte to make sure the sign bit is 0, and a byte with value 255 to encode the numerical value of the integer. ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1657371671.0 ecdsa-0.18.0/docs/source/conf.py0000664005075200507520000000452414262276027016015 0ustar00hkariohkario# Configuration file for the Sphinx documentation builder. # # This file only contains a selection of the most common options. 
For a full # list see the documentation: # https://www.sphinx-doc.org/en/master/usage/configuration.html # -- Path setup -------------------------------------------------------------- # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. # import os import sys sys.path.insert(0, os.path.abspath("../../src")) # -- Project information ----------------------------------------------------- project = "python-ecdsa" copyright = "2021, Brian Warner and Hubert Kario" author = "Brian Warner and Hubert Kario" # The full version, including alpha/beta/rc tags release = "0.17.0" # -- General configuration --------------------------------------------------- # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. extensions = [ "sphinx.ext.autodoc", "sphinx.ext.intersphinx", "sphinx.ext.coverage", "sphinx.ext.imgmath", "sphinx.ext.viewcode", ] # Add any paths that contain templates here, relative to this directory. templates_path = ["_templates"] # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. # This pattern also affects html_static_path and html_extra_path. exclude_patterns = [] todo_include_todos = False # -- Options for HTML output ------------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. # html_theme = "sphinx_rtd_theme" # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". html_static_path = ["_static"] # Example configuration for intersphinx: refer to the Python standard library. intersphinx_mapping = {"https://docs.python.org/": None} autodoc_default_options = { "undoc-members": True, "inherited-members": True, } intersphinx_mapping = {"https://docs.python.org/": None} ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1657371671.0 ecdsa-0.18.0/docs/source/ec_arithmetic.rst0000664005075200507520000001010014262276027020033 0ustar00hkariohkario========================= Elliptic Curve arithmetic ========================= The python-ecdsa also provides generic API for performing operations on elliptic curve points. .. warning:: This is documentation of a very low-level API, if you want to handle keys or signatures you should look at documentation of the :py:mod:`~ecdsa.keys` module. Short Weierstrass curves ======================== There are two low-level implementations for :term:`short Weierstrass curves `: :py:class:`~ecdsa.ellipticcurve.Point` and :py:class:`~ecdsa.ellipticcurve.PointJacobi`. Both of them use the curves specified using the :py:class:`~ecdsa.ellipticcurve.CurveFp` object. You can either provide your own curve parameters or use one of the predefined curves. For example, to define a curve :math:`y^2 = x^3 + 1 * x + 4 \text{ mod } 5` use code like this: .. code:: python from ecdsa.ellipticcurve import CurveFp custom_curve = CurveFp(5, 1, 4) The predefined curves are specified in the :py:mod:`~ecdsa.ecdsa` module, but it's much easier to use the helper functions (and proper names) from the :py:mod:`~ecdsa.curves` module. 
For example, to get the curve parameters for the NIST P-256 curve use this code: .. code:: python from ecdsa.curves import NIST256p curve = NIST256p.curve .. tip:: You can also use :py:class:`~ecdsa.curves.Curve` to get the curve parameters from a PEM or DER file. You can also use :py:func:`~ecdsa.curves.curve_by_name` to get a curve by specifying its name. Or use the :py:func:`~ecdsa.curves.find_curve` to get a curve by specifying its ASN.1 object identifier (OID). Affine coordinates ------------------ After taking hold of curve parameters you can create a point on the curve. The :py:class:`~ecdsa.ellipticcurve.Point` uses affine coordinates, i.e. the :math:`x` and :math:`y` from the curve equation directly. To specify a point (1, 1) on the ``custom_curve`` you can use this code: .. code:: python from ecdsa.ellipticcurve import Point point_a = Point(custom_curve, 1, 1) Then it's possible to either perform scalar multiplication: .. code:: python point_b = point_a * 3 Or specify other points and perform addition: .. code:: python point_b = Point(custom_curve, 3, 2) point_c = point_a + point_b To get the affine coordinates of the point, call the ``x()`` and ``y()`` methods of the object: .. code:: python print("x: {0}, y: {1}".format(point_c.x(), point_c.y())) Projective coordinates ---------------------- When using the Jacobi coordinates, the point is defined by 3 integers, which are related to the :math:`x` and :math:`y` in the following way: .. math:: x = X/Z^2 \\ y = Y/Z^3 That means that if you have point in affine coordinates, it's possible to convert them to Jacobi by simply assuming :math:`Z = 1`. So the same points can be specified as so: .. code:: python from ecdsa.ellipticcurve import PointJacobi point_a = PointJacobi(custom_curve, 1, 1, 1) point_b = PointJacobi(custom_curve, 3, 2, 1) .. note:: Unlike the :py:class:`~ecdsa.ellipticcurve.Point`, the :py:class:`~ecdsa.ellipticcurve.PointJacobi` does **not** check if the coordinates specify a valid point on the curve as that operation is computationally expensive for Jacobi coordinates. If you want to verify if they specify a valid point, you need to convert the point to affine coordinates and use the :py:meth:`~ecdsa.ellipticcurve.CurveFp.contains_point` method. Then all the operations work exactly the same as with regular :py:class:`~ecdsa.ellipticcurve.Point` implementation. While it's not possible to get the internal :math:`X`, :math:`Y`, and :math:`Z` coordinates, it's possible to get the affine projection just like with the regular implementation: .. code:: python point_c = point_a + point_b print("x: {0}, y: {1}".format(point_c.x(), point_c.y())) All the other operations, like scalar multiplication or point addition work on projective points the same as with affine representation, but they are much more effective computationally. ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1649084541.0 ecdsa-0.18.0/docs/source/ecdsa.curves.rst0000664005075200507520000000017414222604175017625 0ustar00hkariohkarioecdsa.curves module =================== .. automodule:: ecdsa.curves :members: :undoc-members: :show-inheritance: ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1649084541.0 ecdsa-0.18.0/docs/source/ecdsa.der.rst0000664005075200507520000000016314222604175017066 0ustar00hkariohkarioecdsa.der module ================ .. 
automodule:: ecdsa.der :members: :undoc-members: :show-inheritance: ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1649084541.0 ecdsa-0.18.0/docs/source/ecdsa.ecdh.rst0000664005075200507520000000016614222604175017222 0ustar00hkariohkarioecdsa.ecdh module ================= .. automodule:: ecdsa.ecdh :members: :undoc-members: :show-inheritance: ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1649084541.0 ecdsa-0.18.0/docs/source/ecdsa.ecdsa.rst0000664005075200507520000000017114222604175017372 0ustar00hkariohkarioecdsa.ecdsa module ================== .. automodule:: ecdsa.ecdsa :members: :undoc-members: :show-inheritance: ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1657371671.0 ecdsa-0.18.0/docs/source/ecdsa.eddsa.rst0000664005075200507520000000017114262276027017400 0ustar00hkariohkarioecdsa.eddsa module ================== .. automodule:: ecdsa.eddsa :members: :undoc-members: :show-inheritance: ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1649084541.0 ecdsa-0.18.0/docs/source/ecdsa.ellipticcurve.rst0000664005075200507520000000022114222604175021161 0ustar00hkariohkarioecdsa.ellipticcurve module ========================== .. automodule:: ecdsa.ellipticcurve :members: :undoc-members: :show-inheritance: ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1649084541.0 ecdsa-0.18.0/docs/source/ecdsa.errors.rst0000664005075200507520000000017414222604175017632 0ustar00hkariohkarioecdsa.errors module =================== .. automodule:: ecdsa.errors :members: :undoc-members: :show-inheritance: ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1649084541.0 ecdsa-0.18.0/docs/source/ecdsa.keys.rst0000664005075200507520000000016614222604175017272 0ustar00hkariohkarioecdsa.keys module ================= .. automodule:: ecdsa.keys :members: :undoc-members: :show-inheritance: ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1649084541.0 ecdsa-0.18.0/docs/source/ecdsa.numbertheory.rst0000664005075200507520000000021614222604175021036 0ustar00hkariohkarioecdsa.numbertheory module ========================= .. automodule:: ecdsa.numbertheory :members: :undoc-members: :show-inheritance: ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1649084541.0 ecdsa-0.18.0/docs/source/ecdsa.rfc6979.rst0000664005075200507520000000017714222604175017432 0ustar00hkariohkarioecdsa.rfc6979 module ==================== .. automodule:: ecdsa.rfc6979 :members: :undoc-members: :show-inheritance: ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1657371671.0 ecdsa-0.18.0/docs/source/ecdsa.rst0000664005075200507520000000052214262276027016321 0ustar00hkariohkarioecdsa package ============= .. automodule:: ecdsa :members: :undoc-members: :show-inheritance: Submodules ---------- .. toctree:: :maxdepth: 4 ecdsa.curves ecdsa.der ecdsa.ecdh ecdsa.ecdsa ecdsa.eddsa ecdsa.ellipticcurve ecdsa.errors ecdsa.keys ecdsa.numbertheory ecdsa.rfc6979 ecdsa.util ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1649084541.0 ecdsa-0.18.0/docs/source/ecdsa.util.rst0000664005075200507520000000016614222604175017274 0ustar00hkariohkarioecdsa.util module ================= .. 
automodule:: ecdsa.util :members: :undoc-members: :show-inheritance: ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1657371671.0 ecdsa-0.18.0/docs/source/glossary.rst0000664005075200507520000001001314262276027017101 0ustar00hkariohkario.. _glossary: Glossary ======== .. glossary:: :sorted: ECC Elliptic Curve Cryptography, a term for all the different ways of using elliptic curves in cryptography. Also combined term for :term:`ECDSA`, :term:`EdDSA`, :term:`ECDH`. ECDSA Elliptic Curve Digital Signature Algorithm EdDSA Edwards curve based Digital Signature Algorithm, the alternative digital signature algorithm that's used for Curve25519 or Curve448 ECDH Elliptic Curve Diffie-Hellman raw encoding Conversion of public, private keys and signatures (which in mathematical sense are integers or pairs of integers) to strings of bytes that does not use any special tags or encoding rules. For any given curve, all keys of the same type or signatures will be encoded to byte strings of the same length. In more formal sense, the integers are encoded as big-endian, constant length byte strings, where the string length is determined by the curve order (e.g. for NIST256p the order is 256 bits long, so the private key will be 32 bytes long while public key will be 64 bytes long). The encoding of a single integer is zero-padded on the left if the numerical value is low. In case of public keys and signatures, which are comprised of two integers, the integers are simply concatenated. uncompressed The most common formatting specified in PKIX standards. Specified in X9.62 and SEC1 standards. The only difference between it and :term:`raw encoding` is the prepending of a 0x04 byte. Thus an uncompressed NIST256p public key encoding will be 65 bytes long. compressed The public point representation that uses half of bytes of the :term:`uncompressed` encoding (rounded up). It uses the first byte of the encoding to specify the sign of the y coordinate and encodes the x coordinate as-is. The first byte of the encoding is equal to 0x02 or 0x03. Compressed encoding of NIST256p public key will be 33 bytes long. hybrid A combination of :term:`uncompressed` and :term:`compressed` encodings. Both x and y coordinates are stored just as in :term:`compressed` encoding, but the first byte reflects the sign of the y coordinate. The first byte of the encoding will be equal to 0x06 or 0x7. Hybrid encoding of NIST256p public key will be 65 bytes long. PEM The acronym stands for Privacy Enhanced Mail, but currently it is used primarily as the way to encode :term:`DER` objects into text that can be either easily copy-pasted or transferred over email. It uses headers like ``-----BEGIN -----`` and footers like ``-----END -----`` to separate multiple types of objects in the same file or the object from the surrounding comments. The actual object stored is base64 encoded. DER Distinguished Encoding Rules, the way to encode :term:`ASN.1` objects deterministically and uniquely into byte strings. ASN.1 Abstract Syntax Notation 1 is a standard description language for specifying serialisation and deserialisation of data structures in a portable and cross-platform way. bytes-like object All the types that implement the buffer protocol. That includes ``str`` (only on python2), ``bytes``, ``bytearray``, ``array.array`` and ``memoryview`` of those objects. Please note that ``array.array`` serialisation (converting it to byte string) is endianess dependant! 
Signature computed over ``array.array`` of integers on a big-endian system will not be verified on a little-endian system and vice-versa. set-like object All the types that support the ``in`` operator, like ``list``, ``tuple``, ``set``, ``frozenset``, etc. short Weierstrass curve A curve with the curve equation: :math:`x^2=y^3+ax+b`. Most popular curves use equation of this format (e.g. NIST256p). ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1657371671.0 ecdsa-0.18.0/docs/source/index.rst0000664005075200507520000000445614262276027016363 0ustar00hkariohkario.. python-ecdsa documentation master file, created by sphinx-quickstart on Sat May 29 18:34:49 2021. You can adapt this file completely to your liking, but it should at least contain the root `toctree` directive. Welcome to python-ecdsa's documentation! ======================================== ``ecdsa`` implements `elliptic-curve cryptography (ECC) `_, more specifically the `Elliptic Curve Digital Signature Algorithm (ECDSA) `_, `Edwards-curve Digital Signature Algorithm (EdDSA) `_ and the `Elliptic Curve Diffie-Hellman (ECDH) `_ algorithms. All of those algorithms are used in many protocols in practice, like in `TLS `_ or `SSH `_. This library provides key generation, signing, verifying, and shared secret derivation for five popular NIST "Suite B" GF(p) (*prime field*) curves, with key lengths of 192, 224, 256, 384, and 521 bits. The "short names" for these curves, as known by the OpenSSL tool (``openssl ecparam -list_curves``), are: ``prime192v1``, ``secp224r1``, ``prime256v1``, ``secp384r1``, and ``secp521r1``. It includes the 256-bit curve ``secp256k1`` used by Bitcoin. There is also support for the regular (non-twisted) variants of Brainpool curves from 160 to 512 bits. The "short names" of those curves are: ``brainpoolP160r1``, ``brainpoolP192r1``, ``brainpoolP224r1``, ``brainpoolP256r1``, ``brainpoolP320r1``, ``brainpoolP384r1``, ``brainpoolP512r1``. Few of the small curves from SEC standard are also included (mainly to speed-up testing of the library), those are: ``secp112r1``, ``secp112r2``, ``secp128r1``, and ``secp160r1``. Key generation, signing and verifying is also supported for Ed25519 and Ed448 curves. No other curves are included, but it is not too hard to add support for more curves over prime fields. .. toctree:: :maxdepth: 2 :caption: Contents: :hidden: quickstart basics ec_arithmetic glossary modules Indices and tables ================== * :ref:`genindex` * :ref:`modindex` * :ref:`glossary` * :ref:`search` ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1657371671.0 ecdsa-0.18.0/docs/source/modules.rst0000664005075200507520000000011214262276027016705 0ustar00hkariohkariopython-ecdsa API ================ .. toctree:: :maxdepth: 4 ecdsa ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1657371671.0 ecdsa-0.18.0/docs/source/quickstart.rst0000664005075200507520000001136714262276027017445 0ustar00hkariohkario=============== Getting started =============== The library has just one mandatory dependency: ``six``. If you install ``python-ecdsa`` through pip, it should automatically install ``six`` too. To install it you can run the following command: .. code:: bash pip install ecdsa The high level API provided by the library is primarily in the :py:class:`~ecdsa.keys` module. 
There you will find the :py:class:`~ecdsa.keys.SigningKey` (the class that enables handling of the private keys) and the :py:class:`~ecdsa.keys.VerifyingKey` (the class that enables handling of the public keys). To handle shared key derivation, the :py:class:`~ecdsa.ecdh.ECDH` class is used. Finally, in case use of custom elliptic curves is necessary, the :py:class:`~ecdsa.curves.Curve` class may be needed. Key generation ============== To generate a key, import the :py:class:`~ecdsa.keys.SigningKey` and call the :py:func:`~ecdsa.keys.SigningKey.generate` function in it: .. code:: python from ecdsa.keys import SigningKey key = SigningKey.generate() By default, that will create a key that uses the NIST P-192 curve. To select a more secure curve, like NIST P-256, import it from the :py:mod:`ecdsa.curves` or from the :py:mod:`ecdsa` module: .. code:: python from ecdsa import SigningKey, NIST256p key = SigningKey.generate(curve=NIST256p) Private key storage and retrieval ================================= To store a key as string or file, you can serialise it using many formats, in general we recommend the PKCS#8 PEM encoding. If you have a :py:class:`~ecdsa.keys.SigningKey` object in ``key`` and want to save it to a file like ``priv_key.pem`` you can run the following code: .. code:: python with open("priv_key.pem", "wb") as f: f.write(key.to_pem(format="pkcs8")) .. warning:: Not specifying the ``format=pkcs8`` will create a file that uses the legacy ``ssleay`` file format which is most commonly used by applications that use OpenSSL, as that was originally the only format supported by it. For a long time though OpenSSL supports the PKCS# 8 format too. To read that file back, you can run code like this: .. code:: python from ecdsa import SigningKey with open("priv_key.pem") as f: key = SigningKey.from_pem(f.read()) .. tip:: As the format is self-describing, the parser will automatically detect if the provided file is in the ``ssleay`` or the ``pkcs8`` format and process it accordingly. Public key derivation ===================== To get the public key associated with the given private key, either call the :py:func:`~ecdsa.keys.SigningKey.get_verifying_key` method or access the ``verifying_key`` attribute in :py:class:`~ecdsa.keys.SigningKey` directly: .. code:: python from ecdsa import SigningKey, NIST256p private_key = SigningKey.generate(curve=NIST256p) public_key = private_key.verifying_key Public key storage and retrieval ================================ Similarly to private keys, public keys can be stored in files: .. code:: python from ecdsa import SigningKey private_key = SigningKey.generate() public_key = private_key.verifying_key with open("pub_key.pem", "wb") as f: f.write(public_key.to_pem()) And read from files: .. code:: python from ecdsa import VerifyingKey with open("pub_key.pem") as f: public_key = VerifyingKey.from_pem(f.read()) Signing ======= To sign a byte string stored in variable ``message`` using SigningKey in ``private_key``, SHA-256, get a signature in the DER format and save it to a file, you can use the following code: .. code:: python from hashlib import sha256 from ecdsa.util import sigencode_der sig = private_key.sign_deterministic( message, hashfunc=sha256, sigencode=sigencode_der ) with open("message.sig", "wb") as f: f.write(sig) .. note:: As cryptographic hashes (SHA-256, SHA3-256, etc.) operate on *bytes* not text strings, any text needs to be serialised into *bytes* before it can be signed. 
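For example, a text message can be encoded into bytes explicitly before it is signed. The sketch below assumes UTF-8, which is simply the most common choice of text encoding:

.. code:: python

    from hashlib import sha256

    from ecdsa import SigningKey, NIST256p
    from ecdsa.util import sigencode_der

    private_key = SigningKey.generate(curve=NIST256p)

    text = "text to be signed"
    message = text.encode("utf-8")  # serialise the text into bytes first

    sig = private_key.sign_deterministic(
        message, hashfunc=sha256, sigencode=sigencode_der
    )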
This is because encoding of string "text" results in very different bytes when it's encoded using UTF-8 and when it's encoded using UCS-2. Verifying ========= To verify a signature of a byte string in ``message`` using a VerifyingKey in ``public_key``, SHA-256 and a DER signature in a ``message.sig`` file, you can use the following code: .. code:: python from hashlib import sha256 from ecdsa import BadSignatureError from ecdsa.util import sigdecode_der with open("message.sig", "rb") as f: sig = f.read() try: ret = public_key.verify(sig, message, sha256, sigdecode=sigdecode_der) assert ret print("Valid signature") except BadSignatureError: print("Incorrect signature") ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1657371717.7434871 ecdsa-0.18.0/setup.cfg0000664005075200507520000000035214262276106014100 0ustar00hkariohkario[wheel] universal = 1 [versioneer] vcs = git style = pep440 versionfile_source = src/ecdsa/_version.py versionfile_build = ecdsa/_version.py tag_prefix = python-ecdsa- parentdir_prefix = ecdsa- [egg_info] tag_build = tag_date = 0 ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1657282872.0 ecdsa-0.18.0/setup.py0000775005075200507520000000321714262020470013766 0ustar00hkariohkario#!/usr/bin/env python import io import os from setuptools import setup import versioneer commands = versioneer.get_cmdclass().copy() # Use README.md to set markdown long_description directory = os.path.abspath(os.path.dirname(__file__)) readme_path = os.path.join(directory, "README.md") with io.open(readme_path, encoding="utf-8") as read_file: long_description = read_file.read() setup( name="ecdsa", version=versioneer.get_version(), description="ECDSA cryptographic signature library (pure python)", long_description=long_description, long_description_content_type="text/markdown", author="Brian Warner", author_email="warner@lothar.com", url="http://github.com/tlsfuzzer/python-ecdsa", packages=["ecdsa"], package_dir={"": "src"}, license="MIT", cmdclass=commands, python_requires=">=2.6, !=3.0.*, !=3.1.*, !=3.2.*", classifiers=[ "Programming Language :: Python", "Programming Language :: Python :: 2", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.3", "Programming Language :: Python :: 3.4", "Programming Language :: Python :: 3.5", "Programming Language :: Python :: 3.6", "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", ], install_requires=["six>=1.9.0"], extras_require={"gmpy2": "gmpy2", "gmpy": "gmpy"}, ) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1648836739.0 ecdsa-0.18.0/speed.py0000664005075200507520000000634114221640203013721 0ustar00hkariohkarioimport six import timeit from ecdsa.curves import curves def do(setup_statements, statement): # extracted from timeit.py t = timeit.Timer(stmt=statement, setup="\n".join(setup_statements)) # determine number so that 0.2 <= total time < 2.0 for i in range(1, 10): number = 10**i x = t.timeit(number) if x >= 0.2: break return x / number prnt_form = ( "{name:>16}{sep:1} {siglen:>6} {keygen:>9{form}}{unit:1} " "{keygen_inv:>9{form_inv}} {sign:>9{form}}{unit:1} " "{sign_inv:>9{form_inv}} {verify:>9{form}}{unit:1} " "{verify_inv:>9{form_inv}} {verify_single:>13{form}}{unit:1} " 
"{verify_single_inv:>14{form_inv}}" ) print( prnt_form.format( siglen="siglen", keygen="keygen", keygen_inv="keygen/s", sign="sign", sign_inv="sign/s", verify="verify", verify_inv="verify/s", verify_single="no PC verify", verify_single_inv="no PC verify/s", name="", sep="", unit="", form="", form_inv="", ) ) for curve in [i.name for i in curves]: S1 = "import six; from ecdsa import SigningKey, %s" % curve S2 = "sk = SigningKey.generate(%s)" % curve S3 = "msg = six.b('msg')" S4 = "sig = sk.sign(msg)" S5 = "vk = sk.get_verifying_key()" S6 = "vk.precompute()" S7 = "vk.verify(sig, msg)" # We happen to know that .generate() also calculates the # verifying key, which is the time-consuming part. If the code # were changed to lazily calculate vk, we'd need to change this # benchmark to loop over S5 instead of S2 keygen = do([S1], S2) sign = do([S1, S2, S3], S4) verf = do([S1, S2, S3, S4, S5, S6], S7) verf_single = do([S1, S2, S3, S4, S5], S7) import ecdsa c = getattr(ecdsa, curve) sig = ecdsa.SigningKey.generate(c).sign(six.b("msg")) print( prnt_form.format( name=curve, sep=":", siglen=len(sig), unit="s", keygen=keygen, keygen_inv=1.0 / keygen, sign=sign, sign_inv=1.0 / sign, verify=verf, verify_inv=1.0 / verf, verify_single=verf_single, verify_single_inv=1.0 / verf_single, form=".5f", form_inv=".2f", ) ) print("") ecdh_form = "{name:>16}{sep:1} {ecdh:>9{form}}{unit:1} {ecdh_inv:>9{form_inv}}" print( ecdh_form.format( ecdh="ecdh", ecdh_inv="ecdh/s", name="", sep="", unit="", form="", form_inv="", ) ) for curve in [i.name for i in curves]: if curve == "Ed25519" or curve == "Ed448": continue S1 = "from ecdsa import SigningKey, ECDH, {0}".format(curve) S2 = "our = SigningKey.generate({0})".format(curve) S3 = "remote = SigningKey.generate({0}).verifying_key".format(curve) S4 = "ecdh = ECDH(private_key=our, public_key=remote)" S5 = "ecdh.generate_sharedsecret_bytes()" ecdh = do([S1, S2, S3, S4], S5) print( ecdh_form.format( name=curve, sep=":", unit="s", form=".5f", form_inv=".2f", ecdh=ecdh, ecdh_inv=1.0 / ecdh, ) ) ././@PaxHeader0000000000000000000000000000003300000000000010211 xustar0027 mtime=1657371717.701489 ecdsa-0.18.0/src/0000775005075200507520000000000014262276106013046 5ustar00hkariohkario././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1657371717.7434871 ecdsa-0.18.0/src/ecdsa/0000775005075200507520000000000014262276106014125 5ustar00hkariohkario././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1641309332.0 ecdsa-0.18.0/src/ecdsa/__init__.py0000664005075200507520000000314514165062224016235 0ustar00hkariohkario# while we don't use six in this file, we did bundle it for a long time, so # keep as part of module in a virtual way (through __all__) import six from .keys import ( SigningKey, VerifyingKey, BadSignatureError, BadDigestError, MalformedPointError, ) from .curves import ( NIST192p, NIST224p, NIST256p, NIST384p, NIST521p, SECP256k1, BRAINPOOLP160r1, BRAINPOOLP192r1, BRAINPOOLP224r1, BRAINPOOLP256r1, BRAINPOOLP320r1, BRAINPOOLP384r1, BRAINPOOLP512r1, SECP112r1, SECP112r2, SECP128r1, SECP160r1, Ed25519, Ed448, ) from .ecdh import ( ECDH, NoKeyError, NoCurveError, InvalidCurveError, InvalidSharedSecretError, ) from .der import UnexpectedDER from . 
import _version # This code comes from http://github.com/tlsfuzzer/python-ecdsa __all__ = [ "curves", "der", "ecdsa", "ellipticcurve", "keys", "numbertheory", "test_pyecdsa", "util", "six", ] _hush_pyflakes = [ SigningKey, VerifyingKey, BadSignatureError, BadDigestError, MalformedPointError, UnexpectedDER, InvalidCurveError, NoKeyError, InvalidSharedSecretError, ECDH, NoCurveError, NIST192p, NIST224p, NIST256p, NIST384p, NIST521p, SECP256k1, BRAINPOOLP160r1, BRAINPOOLP192r1, BRAINPOOLP224r1, BRAINPOOLP256r1, BRAINPOOLP320r1, BRAINPOOLP384r1, BRAINPOOLP512r1, SECP112r1, SECP112r2, SECP128r1, SECP160r1, Ed25519, Ed448, six.b(""), ] del _hush_pyflakes __version__ = _version.get_versions()["version"] ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1649084541.0 ecdsa-0.18.0/src/ecdsa/_compat.py0000664005075200507520000001101314222604175016112 0ustar00hkariohkario""" Common functions for providing cross-python version compatibility. """ import sys import re import binascii from six import integer_types def str_idx_as_int(string, index): """Take index'th byte from string, return as integer""" val = string[index] if isinstance(val, integer_types): return val return ord(val) if sys.version_info < (3, 0): # pragma: no branch import platform def normalise_bytes(buffer_object): """Cast the input into array of bytes.""" # flake8 runs on py3 where `buffer` indeed doesn't exist... return buffer(buffer_object) # noqa: F821 def hmac_compat(ret): return ret if ( sys.version_info < (2, 7) or sys.version_info < (2, 7, 4) or platform.system() == "Java" ): # pragma: no branch def remove_whitespace(text): """Removes all whitespace from passed in string""" return re.sub(r"\s+", "", text) def compat26_str(val): return str(val) def bit_length(val): if val == 0: return 0 return len(bin(val)) - 2 else: def remove_whitespace(text): """Removes all whitespace from passed in string""" return re.sub(r"\s+", "", text, flags=re.UNICODE) def compat26_str(val): return val def bit_length(val): """Return number of bits necessary to represent an integer.""" return val.bit_length() def b2a_hex(val): return binascii.b2a_hex(compat26_str(val)) def a2b_hex(val): try: return bytearray(binascii.a2b_hex(val)) except Exception as e: raise ValueError("base16 error: %s" % e) def bytes_to_int(val, byteorder): """Convert bytes to an int.""" if not val: return 0 if byteorder == "big": return int(b2a_hex(val), 16) if byteorder == "little": return int(b2a_hex(val[::-1]), 16) raise ValueError("Only 'big' and 'little' endian supported") def int_to_bytes(val, length=None, byteorder="big"): """Return number converted to bytes""" if length is None: length = byte_length(val) if byteorder == "big": return bytearray( (val >> i) & 0xFF for i in reversed(range(0, length * 8, 8)) ) if byteorder == "little": return bytearray( (val >> i) & 0xFF for i in range(0, length * 8, 8) ) raise ValueError("Only 'big' or 'little' endian supported") else: if sys.version_info < (3, 4): # pragma: no branch # on python 3.3 hmac.hmac.update() accepts only bytes, on newer # versions it does accept memoryview() also def hmac_compat(data): if not isinstance(data, bytes): # pragma: no branch return bytes(data) return data def normalise_bytes(buffer_object): """Cast the input into array of bytes.""" if not buffer_object: return b"" return memoryview(buffer_object).cast("B") else: def hmac_compat(data): return data def normalise_bytes(buffer_object): """Cast the input into array of bytes.""" return memoryview(buffer_object).cast("B") def 
compat26_str(val): return val def remove_whitespace(text): """Removes all whitespace from passed in string""" return re.sub(r"\s+", "", text, flags=re.UNICODE) def a2b_hex(val): try: return bytearray(binascii.a2b_hex(bytearray(val, "ascii"))) except Exception as e: raise ValueError("base16 error: %s" % e) # pylint: disable=invalid-name # pylint is stupid here and doesn't notice it's a function, not # constant bytes_to_int = int.from_bytes # pylint: enable=invalid-name def bit_length(val): """Return number of bits necessary to represent an integer.""" return val.bit_length() def int_to_bytes(val, length=None, byteorder="big"): """Convert integer to bytes.""" if length is None: length = byte_length(val) # for gmpy we need to convert back to native int if type(val) != int: val = int(val) return bytearray(val.to_bytes(length=length, byteorder=byteorder)) def byte_length(val): """Return number of bytes necessary to represent an integer.""" length = bit_length(val) return (length + 7) // 8 ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1584035876.0 ecdsa-0.18.0/src/ecdsa/_rwlock.py0000664005075200507520000000544113632474044016144 0ustar00hkariohkario# Copyright Mateusz Kobos, (c) 2011 # https://code.activestate.com/recipes/577803-reader-writer-lock-with-priority-for-writers/ # released under the MIT licence import threading __author__ = "Mateusz Kobos" class RWLock: """ Read-Write locking primitive Synchronization object used in a solution of so-called second readers-writers problem. In this problem, many readers can simultaneously access a share, and a writer has an exclusive access to this share. Additionally, the following constraints should be met: 1) no reader should be kept waiting if the share is currently opened for reading unless a writer is also waiting for the share, 2) no writer should be kept waiting for the share longer than absolutely necessary. The implementation is based on [1, secs. 4.2.2, 4.2.6, 4.2.7] with a modification -- adding an additional lock (C{self.__readers_queue}) -- in accordance with [2]. Sources: [1] A.B. Downey: "The little book of semaphores", Version 2.1.5, 2008 [2] P.J. Courtois, F. Heymans, D.L. Parnas: "Concurrent Control with 'Readers' and 'Writers'", Communications of the ACM, 1971 (via [3]) [3] http://en.wikipedia.org/wiki/Readers-writers_problem """ def __init__(self): """ A lock giving an even higher priority to the writer in certain cases (see [2] for a discussion). """ self.__read_switch = _LightSwitch() self.__write_switch = _LightSwitch() self.__no_readers = threading.Lock() self.__no_writers = threading.Lock() self.__readers_queue = threading.Lock() def reader_acquire(self): self.__readers_queue.acquire() self.__no_readers.acquire() self.__read_switch.acquire(self.__no_writers) self.__no_readers.release() self.__readers_queue.release() def reader_release(self): self.__read_switch.release(self.__no_writers) def writer_acquire(self): self.__write_switch.acquire(self.__no_readers) self.__no_writers.acquire() def writer_release(self): self.__no_writers.release() self.__write_switch.release(self.__no_readers) class _LightSwitch: """An auxiliary "light switch"-like object. The first thread turns on the "switch", the last one turns it off (see [1, sec. 
4.2.2] for details).""" def __init__(self): self.__counter = 0 self.__mutex = threading.Lock() def acquire(self, lock): self.__mutex.acquire() self.__counter += 1 if self.__counter == 1: lock.acquire() self.__mutex.release() def release(self, lock): self.__mutex.acquire() self.__counter -= 1 if self.__counter == 0: lock.release() self.__mutex.release() ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1648836739.0 ecdsa-0.18.0/src/ecdsa/_sha3.py0000664005075200507520000001121314221640203015456 0ustar00hkariohkario""" Implementation of the SHAKE-256 algorithm for Ed448 """ try: import hashlib hashlib.new("shake256").digest(64) def shake_256(msg, outlen): return hashlib.new("shake256", msg).digest(outlen) except (TypeError, ValueError): from ._compat import bytes_to_int, int_to_bytes # From little endian. def _from_le(s): return bytes_to_int(s, byteorder="little") # Rotate a word x by b places to the left. def _rol(x, b): return ((x << b) | (x >> (64 - b))) & (2**64 - 1) # Do the SHA-3 state transform on state s. def _sha3_transform(s): ROTATIONS = [ 0, 1, 62, 28, 27, 36, 44, 6, 55, 20, 3, 10, 43, 25, 39, 41, 45, 15, 21, 8, 18, 2, 61, 56, 14, ] PERMUTATION = [ 1, 6, 9, 22, 14, 20, 2, 12, 13, 19, 23, 15, 4, 24, 21, 8, 16, 5, 3, 18, 17, 11, 7, 10, ] RC = [ 0x0000000000000001, 0x0000000000008082, 0x800000000000808A, 0x8000000080008000, 0x000000000000808B, 0x0000000080000001, 0x8000000080008081, 0x8000000000008009, 0x000000000000008A, 0x0000000000000088, 0x0000000080008009, 0x000000008000000A, 0x000000008000808B, 0x800000000000008B, 0x8000000000008089, 0x8000000000008003, 0x8000000000008002, 0x8000000000000080, 0x000000000000800A, 0x800000008000000A, 0x8000000080008081, 0x8000000000008080, 0x0000000080000001, 0x8000000080008008, ] for rnd in range(0, 24): # AddColumnParity (Theta) c = [0] * 5 d = [0] * 5 for i in range(0, 25): c[i % 5] ^= s[i] for i in range(0, 5): d[i] = c[(i + 4) % 5] ^ _rol(c[(i + 1) % 5], 1) for i in range(0, 25): s[i] ^= d[i % 5] # RotateWords (Rho) for i in range(0, 25): s[i] = _rol(s[i], ROTATIONS[i]) # PermuteWords (Pi) t = s[PERMUTATION[0]] for i in range(0, len(PERMUTATION) - 1): s[PERMUTATION[i]] = s[PERMUTATION[i + 1]] s[PERMUTATION[-1]] = t # NonlinearMixRows (Chi) for i in range(0, 25, 5): t = [ s[i], s[i + 1], s[i + 2], s[i + 3], s[i + 4], s[i], s[i + 1], ] for j in range(0, 5): s[i + j] = t[j] ^ ((~t[j + 1]) & (t[j + 2])) # AddRoundConstant (Iota) s[0] ^= RC[rnd] # Reinterpret octet array b to word array and XOR it to state s. def _reinterpret_to_words_and_xor(s, b): for j in range(0, len(b) // 8): s[j] ^= _from_le(b[8 * j : 8 * j + 8]) # Reinterpret word array w to octet array and return it. def _reinterpret_to_octets(w): mp = bytearray() for j in range(0, len(w)): mp += int_to_bytes(w[j], 8, byteorder="little") return mp def _sha3_raw(msg, r_w, o_p, e_b): """Semi-generic SHA-3 implementation""" r_b = 8 * r_w s = [0] * 25 # Handle whole blocks. idx = 0 blocks = len(msg) // r_b for i in range(0, blocks): _reinterpret_to_words_and_xor(s, msg[idx : idx + r_b]) idx += r_b _sha3_transform(s) # Handle last block padding. m = bytearray(msg[idx:]) m.append(o_p) while len(m) < r_b: m.append(0) m[len(m) - 1] |= 128 # Handle padded last block. _reinterpret_to_words_and_xor(s, m) _sha3_transform(s) # Output. 
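        # Squeeze phase: emit the rate-sized prefix of the state (r_w words)
        # as bytes, permuting the state between blocks, until e_b output
        # bytes have been collected.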
out = bytearray() while len(out) < e_b: out += _reinterpret_to_octets(s[:r_w]) _sha3_transform(s) return out[:e_b] def shake_256(msg, outlen): return _sha3_raw(msg, 17, 31, outlen) ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1657371717.7434871 ecdsa-0.18.0/src/ecdsa/_version.py0000664005075200507520000000076214262276106016330 0ustar00hkariohkario # This file was generated by 'versioneer.py' (0.21) from # revision-control system data, or from the parent directory name of an # unpacked source archive. Distribution tarballs contain a pre-generated copy # of this file. import json version_json = ''' { "date": "2022-07-09T14:49:17+0200", "dirty": false, "error": null, "full-revisionid": "341e0d8be9fedf66fbc9a95630b4ed2138343380", "version": "0.18.0" } ''' # END VERSION_JSON def get_versions(): return json.loads(version_json) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1654868667.0 ecdsa-0.18.0/src/ecdsa/curves.py0000664005075200507520000003406614250645273016021 0ustar00hkariohkariofrom __future__ import division from six import PY2 from . import der, ecdsa, ellipticcurve, eddsa from .util import orderlen, number_to_string, string_to_number from ._compat import normalise_bytes, bit_length # orderlen was defined in this module previously, so keep it in __all__, # will need to mark it as deprecated later __all__ = [ "UnknownCurveError", "orderlen", "Curve", "SECP112r1", "SECP112r2", "SECP128r1", "SECP160r1", "NIST192p", "NIST224p", "NIST256p", "NIST384p", "NIST521p", "curves", "find_curve", "curve_by_name", "SECP256k1", "BRAINPOOLP160r1", "BRAINPOOLP192r1", "BRAINPOOLP224r1", "BRAINPOOLP256r1", "BRAINPOOLP320r1", "BRAINPOOLP384r1", "BRAINPOOLP512r1", "PRIME_FIELD_OID", "CHARACTERISTIC_TWO_FIELD_OID", "Ed25519", "Ed448", ] PRIME_FIELD_OID = (1, 2, 840, 10045, 1, 1) CHARACTERISTIC_TWO_FIELD_OID = (1, 2, 840, 10045, 1, 2) class UnknownCurveError(Exception): pass class Curve: def __init__(self, name, curve, generator, oid, openssl_name=None): self.name = name self.openssl_name = openssl_name # maybe None self.curve = curve self.generator = generator self.order = generator.order() if isinstance(curve, ellipticcurve.CurveEdTw): # EdDSA keys are special in that both private and public # are the same size (as it's defined only with compressed points) # +1 for the sign bit and then round up self.baselen = (bit_length(curve.p()) + 1 + 7) // 8 self.verifying_key_length = self.baselen else: self.baselen = orderlen(self.order) self.verifying_key_length = 2 * orderlen(curve.p()) self.signature_length = 2 * self.baselen self.oid = oid if oid: self.encoded_oid = der.encode_oid(*oid) def __eq__(self, other): if isinstance(other, Curve): return ( self.curve == other.curve and self.generator == other.generator ) return NotImplemented def __ne__(self, other): return not self == other def __repr__(self): return self.name def to_der(self, encoding=None, point_encoding="uncompressed"): """Serialise the curve parameters to binary string. :param str encoding: the format to save the curve parameters in. Default is ``named_curve``, with fallback being the ``explicit`` if the OID is not set for the curve. :param str point_encoding: the point encoding of the generator when explicit curve encoding is used. Ignored for ``named_curve`` format. 
:return: DER encoded ECParameters structure :rtype: bytes """ if encoding is None: if self.oid: encoding = "named_curve" else: encoding = "explicit" if encoding not in ("named_curve", "explicit"): raise ValueError( "Only 'named_curve' and 'explicit' encodings supported" ) if encoding == "named_curve": if not self.oid: raise UnknownCurveError( "Can't encode curve using named_curve encoding without " "associated curve OID" ) return der.encode_oid(*self.oid) elif isinstance(self.curve, ellipticcurve.CurveEdTw): assert encoding == "explicit" raise UnknownCurveError( "Twisted Edwards curves don't support explicit encoding" ) # encode the ECParameters sequence curve_p = self.curve.p() version = der.encode_integer(1) field_id = der.encode_sequence( der.encode_oid(*PRIME_FIELD_OID), der.encode_integer(curve_p) ) curve = der.encode_sequence( der.encode_octet_string( number_to_string(self.curve.a() % curve_p, curve_p) ), der.encode_octet_string( number_to_string(self.curve.b() % curve_p, curve_p) ), ) base = der.encode_octet_string(self.generator.to_bytes(point_encoding)) order = der.encode_integer(self.generator.order()) seq_elements = [version, field_id, curve, base, order] if self.curve.cofactor(): cofactor = der.encode_integer(self.curve.cofactor()) seq_elements.append(cofactor) return der.encode_sequence(*seq_elements) def to_pem(self, encoding=None, point_encoding="uncompressed"): """ Serialise the curve parameters to the :term:`PEM` format. :param str encoding: the format to save the curve parameters in. Default is ``named_curve``, with fallback being the ``explicit`` if the OID is not set for the curve. :param str point_encoding: the point encoding of the generator when explicit curve encoding is used. Ignored for ``named_curve`` format. :return: PEM encoded ECParameters structure :rtype: str """ return der.topem( self.to_der(encoding, point_encoding), "EC PARAMETERS" ) @staticmethod def from_der(data, valid_encodings=None): """Decode the curve parameters from DER file. 
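        For example, parameters serialised with :py:meth:`to_der` decode
        back to the matching well-known curve object (an illustrative
        sketch)::

            from ecdsa.curves import NIST256p, Curve

            params = NIST256p.to_der()  # ``named_curve`` encoding by default
            assert Curve.from_der(params) == NIST256p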
:param data: the binary string to decode the parameters from :type data: :term:`bytes-like object` :param valid_encodings: set of names of allowed encodings, by default all (set by passing ``None``), supported ones are ``named_curve`` and ``explicit`` :type valid_encodings: :term:`set-like object` """ if not valid_encodings: valid_encodings = set(("named_curve", "explicit")) if not all(i in ["named_curve", "explicit"] for i in valid_encodings): raise ValueError( "Only named_curve and explicit encodings supported" ) data = normalise_bytes(data) if not der.is_sequence(data): if "named_curve" not in valid_encodings: raise der.UnexpectedDER( "named_curve curve parameters not allowed" ) oid, empty = der.remove_object(data) if empty: raise der.UnexpectedDER("Unexpected data after OID") return find_curve(oid) if "explicit" not in valid_encodings: raise der.UnexpectedDER("explicit curve parameters not allowed") seq, empty = der.remove_sequence(data) if empty: raise der.UnexpectedDER( "Unexpected data after ECParameters structure" ) # decode the ECParameters sequence version, rest = der.remove_integer(seq) if version != 1: raise der.UnexpectedDER("Unknown parameter encoding format") field_id, rest = der.remove_sequence(rest) curve, rest = der.remove_sequence(rest) base_bytes, rest = der.remove_octet_string(rest) order, rest = der.remove_integer(rest) cofactor = None if rest: # the ASN.1 specification of ECParameters allows for future # extensions of the sequence, so ignore the remaining bytes cofactor, _ = der.remove_integer(rest) # decode the ECParameters.fieldID sequence field_type, rest = der.remove_object(field_id) if field_type == CHARACTERISTIC_TWO_FIELD_OID: raise UnknownCurveError("Characteristic 2 curves unsupported") if field_type != PRIME_FIELD_OID: raise UnknownCurveError( "Unknown field type: {0}".format(field_type) ) prime, empty = der.remove_integer(rest) if empty: raise der.UnexpectedDER( "Unexpected data after ECParameters.fieldID.Prime-p element" ) # decode the ECParameters.curve sequence curve_a_bytes, rest = der.remove_octet_string(curve) curve_b_bytes, rest = der.remove_octet_string(rest) # seed can be defined here, but we don't parse it, so ignore `rest` curve_a = string_to_number(curve_a_bytes) curve_b = string_to_number(curve_b_bytes) curve_fp = ellipticcurve.CurveFp(prime, curve_a, curve_b, cofactor) # decode the ECParameters.base point base = ellipticcurve.PointJacobi.from_bytes( curve_fp, base_bytes, valid_encodings=("uncompressed", "compressed", "hybrid"), order=order, generator=True, ) tmp_curve = Curve("unknown", curve_fp, base, None) # if the curve matches one of the well-known ones, use the well-known # one in preference, as it will have the OID and name associated for i in curves: if tmp_curve == i: return i return tmp_curve @classmethod def from_pem(cls, string, valid_encodings=None): """Decode the curve parameters from PEM file. 
:param str string: the text string to decode the parameters from :param valid_encodings: set of names of allowed encodings, by default all (set by passing ``None``), supported ones are ``named_curve`` and ``explicit`` :type valid_encodings: :term:`set-like object` """ if not PY2 and isinstance(string, str): # pragma: no branch string = string.encode() ec_param_index = string.find(b"-----BEGIN EC PARAMETERS-----") if ec_param_index == -1: raise der.UnexpectedDER("EC PARAMETERS PEM header not found") return cls.from_der( der.unpem(string[ec_param_index:]), valid_encodings ) # the SEC curves SECP112r1 = Curve( "SECP112r1", ecdsa.curve_112r1, ecdsa.generator_112r1, (1, 3, 132, 0, 6), "secp112r1", ) SECP112r2 = Curve( "SECP112r2", ecdsa.curve_112r2, ecdsa.generator_112r2, (1, 3, 132, 0, 7), "secp112r2", ) SECP128r1 = Curve( "SECP128r1", ecdsa.curve_128r1, ecdsa.generator_128r1, (1, 3, 132, 0, 28), "secp128r1", ) SECP160r1 = Curve( "SECP160r1", ecdsa.curve_160r1, ecdsa.generator_160r1, (1, 3, 132, 0, 8), "secp160r1", ) # the NIST curves NIST192p = Curve( "NIST192p", ecdsa.curve_192, ecdsa.generator_192, (1, 2, 840, 10045, 3, 1, 1), "prime192v1", ) NIST224p = Curve( "NIST224p", ecdsa.curve_224, ecdsa.generator_224, (1, 3, 132, 0, 33), "secp224r1", ) NIST256p = Curve( "NIST256p", ecdsa.curve_256, ecdsa.generator_256, (1, 2, 840, 10045, 3, 1, 7), "prime256v1", ) NIST384p = Curve( "NIST384p", ecdsa.curve_384, ecdsa.generator_384, (1, 3, 132, 0, 34), "secp384r1", ) NIST521p = Curve( "NIST521p", ecdsa.curve_521, ecdsa.generator_521, (1, 3, 132, 0, 35), "secp521r1", ) SECP256k1 = Curve( "SECP256k1", ecdsa.curve_secp256k1, ecdsa.generator_secp256k1, (1, 3, 132, 0, 10), "secp256k1", ) BRAINPOOLP160r1 = Curve( "BRAINPOOLP160r1", ecdsa.curve_brainpoolp160r1, ecdsa.generator_brainpoolp160r1, (1, 3, 36, 3, 3, 2, 8, 1, 1, 1), "brainpoolP160r1", ) BRAINPOOLP192r1 = Curve( "BRAINPOOLP192r1", ecdsa.curve_brainpoolp192r1, ecdsa.generator_brainpoolp192r1, (1, 3, 36, 3, 3, 2, 8, 1, 1, 3), "brainpoolP192r1", ) BRAINPOOLP224r1 = Curve( "BRAINPOOLP224r1", ecdsa.curve_brainpoolp224r1, ecdsa.generator_brainpoolp224r1, (1, 3, 36, 3, 3, 2, 8, 1, 1, 5), "brainpoolP224r1", ) BRAINPOOLP256r1 = Curve( "BRAINPOOLP256r1", ecdsa.curve_brainpoolp256r1, ecdsa.generator_brainpoolp256r1, (1, 3, 36, 3, 3, 2, 8, 1, 1, 7), "brainpoolP256r1", ) BRAINPOOLP320r1 = Curve( "BRAINPOOLP320r1", ecdsa.curve_brainpoolp320r1, ecdsa.generator_brainpoolp320r1, (1, 3, 36, 3, 3, 2, 8, 1, 1, 9), "brainpoolP320r1", ) BRAINPOOLP384r1 = Curve( "BRAINPOOLP384r1", ecdsa.curve_brainpoolp384r1, ecdsa.generator_brainpoolp384r1, (1, 3, 36, 3, 3, 2, 8, 1, 1, 11), "brainpoolP384r1", ) BRAINPOOLP512r1 = Curve( "BRAINPOOLP512r1", ecdsa.curve_brainpoolp512r1, ecdsa.generator_brainpoolp512r1, (1, 3, 36, 3, 3, 2, 8, 1, 1, 13), "brainpoolP512r1", ) Ed25519 = Curve( "Ed25519", eddsa.curve_ed25519, eddsa.generator_ed25519, (1, 3, 101, 112), ) Ed448 = Curve( "Ed448", eddsa.curve_ed448, eddsa.generator_ed448, (1, 3, 101, 113), ) # no order in particular, but keep previously added curves first curves = [ NIST192p, NIST224p, NIST256p, NIST384p, NIST521p, SECP256k1, BRAINPOOLP160r1, BRAINPOOLP192r1, BRAINPOOLP224r1, BRAINPOOLP256r1, BRAINPOOLP320r1, BRAINPOOLP384r1, BRAINPOOLP512r1, SECP112r1, SECP112r2, SECP128r1, SECP160r1, Ed25519, Ed448, ] def find_curve(oid_curve): """Select a curve based on its OID :param tuple[int,...] oid_curve: ASN.1 Object Identifier of the curve to return, like ``(1, 2, 840, 10045, 3, 1, 7)`` for ``NIST256p``. 
:raises UnknownCurveError: When the oid doesn't match any of the supported curves :rtype: ~ecdsa.curves.Curve """ for c in curves: if c.oid == oid_curve: return c raise UnknownCurveError( "I don't know about the curve with oid %s." "I only know about these: %s" % (oid_curve, [c.name for c in curves]) ) def curve_by_name(name): """Select a curve based on its name. Returns a :py:class:`~ecdsa.curves.Curve` object with a ``name`` name. Note that ``name`` is case-sensitve. :param str name: Name of the curve to return, like ``NIST256p`` or ``prime256v1`` :raises UnknownCurveError: When the name doesn't match any of the supported curves :rtype: ~ecdsa.curves.Curve """ for c in curves: if name == c.name or (c.openssl_name and name == c.openssl_name): return c raise UnknownCurveError( "Curve with name {0!r} unknown, only curves supported: {1}".format( name, [c.name for c in curves] ) ) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1648836739.0 ecdsa-0.18.0/src/ecdsa/der.py0000664005075200507520000003343514221640203015245 0ustar00hkariohkariofrom __future__ import division import binascii import base64 import warnings from itertools import chain from six import int2byte, b, text_type from ._compat import str_idx_as_int class UnexpectedDER(Exception): pass def encode_constructed(tag, value): return int2byte(0xA0 + tag) + encode_length(len(value)) + value def encode_integer(r): assert r >= 0 # can't support negative numbers yet h = ("%x" % r).encode() if len(h) % 2: h = b("0") + h s = binascii.unhexlify(h) num = str_idx_as_int(s, 0) if num <= 0x7F: return b("\x02") + encode_length(len(s)) + s else: # DER integers are two's complement, so if the first byte is # 0x80-0xff then we need an extra 0x00 byte to prevent it from # looking negative. return b("\x02") + encode_length(len(s) + 1) + b("\x00") + s # sentry object to check if an argument was specified (used to detect # deprecated calling convention) _sentry = object() def encode_bitstring(s, unused=_sentry): """ Encode a binary string as a BIT STRING using :term:`DER` encoding. Note, because there is no native Python object that can encode an actual bit string, this function only accepts byte strings as the `s` argument. The byte string is the actual bit string that will be encoded, padded on the right (least significant bits, looking from big endian perspective) to the first full byte. If the bit string has a bit length that is multiple of 8, then the padding should not be included. For correct DER encoding the padding bits MUST be set to 0. Number of bits of padding need to be provided as the `unused` parameter. In case they are specified as None, it means the number of unused bits is already encoded in the string as the first byte. The deprecated call convention specifies just the `s` parameters and encodes the number of unused bits as first parameter (same convention as with None). Empty string must be encoded with `unused` specified as 0. Future version of python-ecdsa will make specifying the `unused` argument mandatory. 
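    For example, the 6-bit string ``101101`` is padded with two zero bits
    to the single byte ``0xb4`` and encoded with ``unused=2`` (an
    illustrative sketch)::

        encode_bitstring(b"\xb4", 2)  # returns the DER bytes 03 02 02 b4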
:param s: bytes to encode :type s: bytes like object :param unused: number of bits at the end of `s` that are unused, must be between 0 and 7 (inclusive) :type unused: int or None :raises ValueError: when `unused` is too large or too small :return: `s` encoded using DER :rtype: bytes """ encoded_unused = b"" len_extra = 0 if unused is _sentry: warnings.warn( "Legacy call convention used, unused= needs to be specified", DeprecationWarning, ) elif unused is not None: if not 0 <= unused <= 7: raise ValueError("unused must be integer between 0 and 7") if unused: if not s: raise ValueError("unused is non-zero but s is empty") last = str_idx_as_int(s, -1) if last & (2**unused - 1): raise ValueError("unused bits must be zeros in DER") encoded_unused = int2byte(unused) len_extra = 1 return b("\x03") + encode_length(len(s) + len_extra) + encoded_unused + s def encode_octet_string(s): return b("\x04") + encode_length(len(s)) + s def encode_oid(first, second, *pieces): assert 0 <= first < 2 and 0 <= second <= 39 or first == 2 and 0 <= second body = b"".join( chain( [encode_number(40 * first + second)], (encode_number(p) for p in pieces), ) ) return b"\x06" + encode_length(len(body)) + body def encode_sequence(*encoded_pieces): total_len = sum([len(p) for p in encoded_pieces]) return b("\x30") + encode_length(total_len) + b("").join(encoded_pieces) def encode_number(n): b128_digits = [] while n: b128_digits.insert(0, (n & 0x7F) | 0x80) n = n >> 7 if not b128_digits: b128_digits.append(0) b128_digits[-1] &= 0x7F return b("").join([int2byte(d) for d in b128_digits]) def is_sequence(string): return string and string[:1] == b"\x30" def remove_constructed(string): s0 = str_idx_as_int(string, 0) if (s0 & 0xE0) != 0xA0: raise UnexpectedDER( "wanted type 'constructed tag' (0xa0-0xbf), got 0x%02x" % s0 ) tag = s0 & 0x1F length, llen = read_length(string[1:]) body = string[1 + llen : 1 + llen + length] rest = string[1 + llen + length :] return tag, body, rest def remove_sequence(string): if not string: raise UnexpectedDER("Empty string does not encode a sequence") if string[:1] != b"\x30": n = str_idx_as_int(string, 0) raise UnexpectedDER("wanted type 'sequence' (0x30), got 0x%02x" % n) length, lengthlength = read_length(string[1:]) if length > len(string) - 1 - lengthlength: raise UnexpectedDER("Length longer than the provided buffer") endseq = 1 + lengthlength + length return string[1 + lengthlength : endseq], string[endseq:] def remove_octet_string(string): if string[:1] != b"\x04": n = str_idx_as_int(string, 0) raise UnexpectedDER("wanted type 'octetstring' (0x04), got 0x%02x" % n) length, llen = read_length(string[1:]) body = string[1 + llen : 1 + llen + length] rest = string[1 + llen + length :] return body, rest def remove_object(string): if not string: raise UnexpectedDER( "Empty string does not encode an object identifier" ) if string[:1] != b"\x06": n = str_idx_as_int(string, 0) raise UnexpectedDER("wanted type 'object' (0x06), got 0x%02x" % n) length, lengthlength = read_length(string[1:]) body = string[1 + lengthlength : 1 + lengthlength + length] rest = string[1 + lengthlength + length :] if not body: raise UnexpectedDER("Empty object identifier") if len(body) != length: raise UnexpectedDER( "Length of object identifier longer than the provided buffer" ) numbers = [] while body: n, ll = read_number(body) numbers.append(n) body = body[ll:] n0 = numbers.pop(0) if n0 < 80: first = n0 // 40 else: first = 2 second = n0 - (40 * first) numbers.insert(0, first) numbers.insert(1, second) return 
tuple(numbers), rest def remove_integer(string): if not string: raise UnexpectedDER( "Empty string is an invalid encoding of an integer" ) if string[:1] != b"\x02": n = str_idx_as_int(string, 0) raise UnexpectedDER("wanted type 'integer' (0x02), got 0x%02x" % n) length, llen = read_length(string[1:]) if length > len(string) - 1 - llen: raise UnexpectedDER("Length longer than provided buffer") if length == 0: raise UnexpectedDER("0-byte long encoding of integer") numberbytes = string[1 + llen : 1 + llen + length] rest = string[1 + llen + length :] msb = str_idx_as_int(numberbytes, 0) if not msb < 0x80: raise UnexpectedDER("Negative integers are not supported") # check if the encoding is the minimal one (DER requirement) if length > 1 and not msb: # leading zero byte is allowed if the integer would have been # considered a negative number otherwise smsb = str_idx_as_int(numberbytes, 1) if smsb < 0x80: raise UnexpectedDER( "Invalid encoding of integer, unnecessary " "zero padding bytes" ) return int(binascii.hexlify(numberbytes), 16), rest def read_number(string): number = 0 llen = 0 if str_idx_as_int(string, 0) == 0x80: raise UnexpectedDER("Non minimal encoding of OID subidentifier") # base-128 big endian, with most significant bit set in all but the last # byte while True: if llen >= len(string): raise UnexpectedDER("ran out of length bytes") number = number << 7 d = str_idx_as_int(string, llen) number += d & 0x7F llen += 1 if not d & 0x80: break return number, llen def encode_length(l): assert l >= 0 if l < 0x80: return int2byte(l) s = ("%x" % l).encode() if len(s) % 2: s = b("0") + s s = binascii.unhexlify(s) llen = len(s) return int2byte(0x80 | llen) + s def read_length(string): if not string: raise UnexpectedDER("Empty string can't encode valid length value") num = str_idx_as_int(string, 0) if not (num & 0x80): # short form return (num & 0x7F), 1 # else long-form: b0&0x7f is number of additional base256 length bytes, # big-endian llen = num & 0x7F if not llen: raise UnexpectedDER("Invalid length encoding, length of length is 0") if llen > len(string) - 1: raise UnexpectedDER("Length of length longer than provided buffer") # verify that the encoding is minimal possible (DER requirement) msb = str_idx_as_int(string, 1) if not msb or llen == 1 and msb < 0x80: raise UnexpectedDER("Not minimal encoding of length") return int(binascii.hexlify(string[1 : 1 + llen]), 16), 1 + llen def remove_bitstring(string, expect_unused=_sentry): """ Remove a BIT STRING object from `string` following :term:`DER`. The `expect_unused` can be used to specify if the bit string should have the amount of unused bits decoded or not. If it's an integer, any read BIT STRING that has number of unused bits different from specified value will cause UnexpectedDER exception to be raised (this is especially useful when decoding BIT STRINGS that have DER encoded object in them; DER encoding is byte oriented, so the unused bits will always equal 0). If the `expect_unused` is specified as None, the first element returned will be a tuple, with the first value being the extracted bit string while the second value will be the decoded number of unused bits. If the `expect_unused` is unspecified, the decoding of byte with number of unused bits will not be attempted and the bit string will be returned as-is, the callee will be required to decode it and verify its correctness. Future version of python will require the `expected_unused` parameter to be specified. 
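    For example, decoding the DER bytes ``03 02 02 b4`` while requiring
    exactly two unused bits returns the padded byte and no trailing data
    (an illustrative sketch, the inverse of the ``encode_bitstring``
    example)::

        remove_bitstring(b"\x03\x02\x02\xb4", 2)  # returns (b"\xb4", b"")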
:param string: string of bytes to extract the BIT STRING from :type string: bytes like object :param expect_unused: number of bits that should be unused in the BIT STRING, or None, to return it to caller :type expect_unused: int or None :raises UnexpectedDER: when the encoding does not follow DER. :return: a tuple with first element being the extracted bit string and the second being the remaining bytes in the string (if any); if the `expect_unused` is specified as None, the first element of the returned tuple will be a tuple itself, with first element being the bit string as bytes and the second element being the number of unused bits at the end of the byte array as an integer :rtype: tuple """ if not string: raise UnexpectedDER("Empty string does not encode a bitstring") if expect_unused is _sentry: warnings.warn( "Legacy call convention used, expect_unused= needs to be" " specified", DeprecationWarning, ) num = str_idx_as_int(string, 0) if string[:1] != b"\x03": raise UnexpectedDER("wanted bitstring (0x03), got 0x%02x" % num) length, llen = read_length(string[1:]) if not length: raise UnexpectedDER("Invalid length of bit string, can't be 0") body = string[1 + llen : 1 + llen + length] rest = string[1 + llen + length :] if expect_unused is not _sentry: unused = str_idx_as_int(body, 0) if not 0 <= unused <= 7: raise UnexpectedDER("Invalid encoding of unused bits") if expect_unused is not None and expect_unused != unused: raise UnexpectedDER("Unexpected number of unused bits") body = body[1:] if unused: if not body: raise UnexpectedDER("Invalid encoding of empty bit string") last = str_idx_as_int(body, -1) # verify that all the unused bits are set to zero (DER requirement) if last & (2**unused - 1): raise UnexpectedDER("Non zero padding bits in bit string") if expect_unused is None: body = (body, unused) return body, rest # SEQUENCE([1, STRING(secexp), cont[0], OBJECT(curvename), cont[1], BINTSTRING) # signatures: (from RFC3279) # ansi-X9-62 OBJECT IDENTIFIER ::= { # iso(1) member-body(2) us(840) 10045 } # # id-ecSigType OBJECT IDENTIFIER ::= { # ansi-X9-62 signatures(4) } # ecdsa-with-SHA1 OBJECT IDENTIFIER ::= { # id-ecSigType 1 } # so 1,2,840,10045,4,1 # so 0x42, .. .. # Ecdsa-Sig-Value ::= SEQUENCE { # r INTEGER, # s INTEGER } # id-public-key-type OBJECT IDENTIFIER ::= { ansi-X9.62 2 } # # id-ecPublicKey OBJECT IDENTIFIER ::= { id-publicKeyType 1 } # I think the secp224r1 identifier is (t=06,l=05,v=2b81040021) # secp224r1 OBJECT IDENTIFIER ::= { # iso(1) identified-organization(3) certicom(132) curve(0) 33 } # and the secp384r1 is (t=06,l=05,v=2b81040022) # secp384r1 OBJECT IDENTIFIER ::= { # iso(1) identified-organization(3) certicom(132) curve(0) 34 } def unpem(pem): if isinstance(pem, text_type): # pragma: no branch pem = pem.encode() d = b("").join( [ l.strip() for l in pem.split(b("\n")) if l and not l.startswith(b("-----")) ] ) return base64.b64decode(d) def topem(der, name): b64 = base64.b64encode(der) lines = [("-----BEGIN %s-----\n" % name).encode()] lines.extend( [b64[start : start + 64] + b("\n") for start in range(0, len(b64), 64)] ) lines.append(("-----END %s-----\n" % name).encode()) return b("").join(lines) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1649084541.0 ecdsa-0.18.0/src/ecdsa/ecdh.py0000664005075200507520000002540314222604175015403 0ustar00hkariohkario""" Class for performing Elliptic-curve Diffie-Hellman (ECDH) operations. 
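A minimal usage sketch (both parties are shown in one process purely for
illustration; in a real protocol each side would hold only its own private
key):

.. code-block:: python

    from ecdsa import ECDH, NIST256p

    # each party generates its own key pair for the agreed-upon curve
    alice = ECDH(curve=NIST256p)
    alice_public = alice.generate_private_key()

    bob = ECDH(curve=NIST256p)
    bob_public = bob.generate_private_key()

    # the public keys are exchanged over the (insecure) channel
    alice.load_received_public_key(bob_public)
    bob.load_received_public_key(alice_public)

    # both sides derive the same shared secret
    assert (alice.generate_sharedsecret_bytes()
            == bob.generate_sharedsecret_bytes())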
""" from .util import number_to_string from .ellipticcurve import INFINITY from .keys import SigningKey, VerifyingKey __all__ = [ "ECDH", "NoKeyError", "NoCurveError", "InvalidCurveError", "InvalidSharedSecretError", ] class NoKeyError(Exception): """ECDH. Key not found but it is needed for operation.""" pass class NoCurveError(Exception): """ECDH. Curve not set but it is needed for operation.""" pass class InvalidCurveError(Exception): """ ECDH. Raised in case the public and private keys use different curves. """ pass class InvalidSharedSecretError(Exception): """ECDH. Raised in case the shared secret we obtained is an INFINITY.""" pass class ECDH(object): """ Elliptic-curve Diffie-Hellman (ECDH). A key agreement protocol. Allows two parties, each having an elliptic-curve public-private key pair, to establish a shared secret over an insecure channel """ def __init__(self, curve=None, private_key=None, public_key=None): """ ECDH init. Call can be initialised without parameters, then the first operation (loading either key) will set the used curve. All parameters must be ultimately set before shared secret calculation will be allowed. :param curve: curve for operations :type curve: Curve :param private_key: `my` private key for ECDH :type private_key: SigningKey :param public_key: `their` public key for ECDH :type public_key: VerifyingKey """ self.curve = curve self.private_key = None self.public_key = None if private_key: self.load_private_key(private_key) if public_key: self.load_received_public_key(public_key) def _get_shared_secret(self, remote_public_key): if not self.private_key: raise NoKeyError( "Private key needs to be set to create shared secret" ) if not self.public_key: raise NoKeyError( "Public key needs to be set to create shared secret" ) if not ( self.private_key.curve == self.curve == remote_public_key.curve ): raise InvalidCurveError( "Curves for public key and private key is not equal." ) # shared secret = PUBKEYtheirs * PRIVATEKEYours result = ( remote_public_key.pubkey.point * self.private_key.privkey.secret_multiplier ) if result == INFINITY: raise InvalidSharedSecretError("Invalid shared secret (INFINITY).") return result.x() def set_curve(self, key_curve): """ Set the working curve for ecdh operations. :param key_curve: curve from `curves` module :type key_curve: Curve """ self.curve = key_curve def generate_private_key(self): """ Generate local private key for ecdh operation with curve that was set. :raises NoCurveError: Curve must be set before key generation. :return: public (verifying) key from this private key. :rtype: VerifyingKey """ if not self.curve: raise NoCurveError("Curve must be set prior to key generation.") return self.load_private_key(SigningKey.generate(curve=self.curve)) def load_private_key(self, private_key): """ Load private key from SigningKey (keys.py) object. Needs to have the same curve as was set with set_curve method. If curve is not set - it sets from this SigningKey :param private_key: Initialised SigningKey class :type private_key: SigningKey :raises InvalidCurveError: private_key curve not the same as self.curve :return: public (verifying) key from this private key. :rtype: VerifyingKey """ if not self.curve: self.curve = private_key.curve if self.curve != private_key.curve: raise InvalidCurveError("Curve mismatch.") self.private_key = private_key return self.private_key.get_verifying_key() def load_private_key_bytes(self, private_key): """ Load private key from byte string. 
Uses current curve and checks if the provided key matches the curve of ECDH key agreement. Key loads via from_string method of SigningKey class :param private_key: private key in bytes string format :type private_key: :term:`bytes-like object` :raises NoCurveError: Curve must be set before loading. :return: public (verifying) key from this private key. :rtype: VerifyingKey """ if not self.curve: raise NoCurveError("Curve must be set prior to key load.") return self.load_private_key( SigningKey.from_string(private_key, curve=self.curve) ) def load_private_key_der(self, private_key_der): """ Load private key from DER byte string. Compares the curve of the DER-encoded key with the ECDH set curve, uses the former if unset. Note, the only DER format supported is the RFC5915 Look at keys.py:SigningKey.from_der() :param private_key_der: string with the DER encoding of private ECDSA key :type private_key_der: string :raises InvalidCurveError: private_key curve not the same as self.curve :return: public (verifying) key from this private key. :rtype: VerifyingKey """ return self.load_private_key(SigningKey.from_der(private_key_der)) def load_private_key_pem(self, private_key_pem): """ Load private key from PEM string. Compares the curve of the DER-encoded key with the ECDH set curve, uses the former if unset. Note, the only PEM format supported is the RFC5915 Look at keys.py:SigningKey.from_pem() it needs to have `EC PRIVATE KEY` section :param private_key_pem: string with PEM-encoded private ECDSA key :type private_key_pem: string :raises InvalidCurveError: private_key curve not the same as self.curve :return: public (verifying) key from this private key. :rtype: VerifyingKey """ return self.load_private_key(SigningKey.from_pem(private_key_pem)) def get_public_key(self): """ Provides a public key that matches the local private key. Needs to be sent to the remote party. :return: public (verifying) key from local private key. :rtype: VerifyingKey """ return self.private_key.get_verifying_key() def load_received_public_key(self, public_key): """ Load public key from VerifyingKey (keys.py) object. Needs to have the same curve as set as current for ecdh operation. If curve is not set - it sets it from VerifyingKey. :param public_key: Initialised VerifyingKey class :type public_key: VerifyingKey :raises InvalidCurveError: public_key curve not the same as self.curve """ if not self.curve: self.curve = public_key.curve if self.curve != public_key.curve: raise InvalidCurveError("Curve mismatch.") self.public_key = public_key def load_received_public_key_bytes( self, public_key_str, valid_encodings=None ): """ Load public key from byte string. Uses current curve and checks if key length corresponds to the current curve. Key loads via from_string method of VerifyingKey class :param public_key_str: public key in bytes string format :type public_key_str: :term:`bytes-like object` :param valid_encodings: list of acceptable point encoding formats, supported ones are: :term:`uncompressed`, :term:`compressed`, :term:`hybrid`, and :term:`raw encoding` (specified with ``raw`` name). All formats by default (specified with ``None``). :type valid_encodings: :term:`set-like object` """ return self.load_received_public_key( VerifyingKey.from_string( public_key_str, self.curve, valid_encodings ) ) def load_received_public_key_der(self, public_key_der): """ Load public key from DER byte string. Compares the curve of the DER-encoded key with the ECDH set curve, uses the former if unset. 
Note, the only DER format supported is the RFC5912 Look at keys.py:VerifyingKey.from_der() :param public_key_der: string with the DER encoding of public ECDSA key :type public_key_der: string :raises InvalidCurveError: public_key curve not the same as self.curve """ return self.load_received_public_key( VerifyingKey.from_der(public_key_der) ) def load_received_public_key_pem(self, public_key_pem): """ Load public key from PEM string. Compares the curve of the PEM-encoded key with the ECDH set curve, uses the former if unset. Note, the only PEM format supported is the RFC5912 Look at keys.py:VerifyingKey.from_pem() :param public_key_pem: string with PEM-encoded public ECDSA key :type public_key_pem: string :raises InvalidCurveError: public_key curve not the same as self.curve """ return self.load_received_public_key( VerifyingKey.from_pem(public_key_pem) ) def generate_sharedsecret_bytes(self): """ Generate shared secret from local private key and remote public key. The objects needs to have both private key and received public key before generation is allowed. :raises InvalidCurveError: public_key curve not the same as self.curve :raises NoKeyError: public_key or private_key is not set :return: shared secret :rtype: bytes """ return number_to_string( self.generate_sharedsecret(), self.private_key.curve.curve.p() ) def generate_sharedsecret(self): """ Generate shared secret from local private key and remote public key. The objects needs to have both private key and received public key before generation is allowed. It's the same for local and remote party, shared secret(local private key, remote public key) == shared secret(local public key, remote private key) :raises InvalidCurveError: public_key curve not the same as self.curve :raises NoKeyError: public_key or private_key is not set :return: shared secret :rtype: int """ return self._get_shared_secret(self.public_key) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1649084541.0 ecdsa-0.18.0/src/ecdsa/ecdsa.py0000664005075200507520000006057314222604175015566 0ustar00hkariohkario#! /usr/bin/env python """ Low level implementation of Elliptic-Curve Digital Signatures. .. note :: You're most likely looking for the :py:class:`~ecdsa.keys` module. This is a low-level implementation of the ECDSA that operates on integers, not byte strings. NOTE: This a low level implementation of ECDSA, for normal applications you should be looking at the keys.py module. Classes and methods for elliptic-curve signatures: private keys, public keys, signatures, and definitions of prime-modulus curves. Example: .. code-block:: python # (In real-life applications, you would probably want to # protect against defects in SystemRandom.) from random import SystemRandom randrange = SystemRandom().randrange # Generate a public/private key pair using the NIST Curve P-192: g = generator_192 n = g.order() secret = randrange( 1, n ) pubkey = Public_key( g, g * secret ) privkey = Private_key( pubkey, secret ) # Signing a hash value: hash = randrange( 1, n ) signature = privkey.sign( hash, randrange( 1, n ) ) # Verifying a signature for a hash value: if pubkey.verifies( hash, signature ): print_("Demo verification succeeded.") else: print_("*** Demo verification failed.") # Verification fails if the hash value is modified: if pubkey.verifies( hash-1, signature ): print_("**** Demo verification failed to reject tampered hash.") else: print_("Demo verification correctly rejected tampered hash.") Revision history: 2005.12.31 - Initial version. 
2008.11.25 - Substantial revisions introducing new classes. 2009.05.16 - Warn against using random.randrange in real applications. 2009.05.17 - Use random.SystemRandom by default. Originally written in 2005 by Peter Pearson and placed in the public domain, modified as part of the python-ecdsa package. """ from six import int2byte, b from . import ellipticcurve from . import numbertheory from .util import bit_length from ._compat import remove_whitespace class RSZeroError(RuntimeError): pass class InvalidPointError(RuntimeError): pass class Signature(object): """ ECDSA signature. :ivar int r: the ``r`` element of the ECDSA signature :ivar int s: the ``s`` element of the ECDSA signature """ def __init__(self, r, s): self.r = r self.s = s def recover_public_keys(self, hash, generator): """ Returns two public keys for which the signature is valid :param int hash: signed hash :param AbstractPoint generator: is the generator used in creation of the signature :rtype: tuple(Public_key, Public_key) :return: a pair of public keys that can validate the signature """ curve = generator.curve() n = generator.order() r = self.r s = self.s e = hash x = r # Compute the curve point with x as x-coordinate alpha = ( pow(x, 3, curve.p()) + (curve.a() * x) + curve.b() ) % curve.p() beta = numbertheory.square_root_mod_prime(alpha, curve.p()) y = beta if beta % 2 == 0 else curve.p() - beta # Compute the public key R1 = ellipticcurve.PointJacobi(curve, x, y, 1, n) Q1 = numbertheory.inverse_mod(r, n) * (s * R1 + (-e % n) * generator) Pk1 = Public_key(generator, Q1) # And the second solution R2 = ellipticcurve.PointJacobi(curve, x, -y, 1, n) Q2 = numbertheory.inverse_mod(r, n) * (s * R2 + (-e % n) * generator) Pk2 = Public_key(generator, Q2) return [Pk1, Pk2] class Public_key(object): """Public key for ECDSA.""" def __init__(self, generator, point, verify=True): """Low level ECDSA public key object. :param generator: the Point that generates the group (the base point) :param point: the Point that defines the public key :param bool verify: if True check if point is valid point on curve :raises InvalidPointError: if the point parameters are invalid or point does not lay on the curve """ self.curve = generator.curve() self.generator = generator self.point = point n = generator.order() p = self.curve.p() if not (0 <= point.x() < p) or not (0 <= point.y() < p): raise InvalidPointError( "The public point has x or y out of range." ) if verify and not self.curve.contains_point(point.x(), point.y()): raise InvalidPointError("Point does not lay on the curve") if not n: raise InvalidPointError("Generator point must have order.") # for curve parameters with base point with cofactor 1, all points # that are on the curve are scalar multiples of the base point, so # verifying that is not necessary. See Section 3.2.2.1 of SEC 1 v2 if ( verify and self.curve.cofactor() != 1 and not n * point == ellipticcurve.INFINITY ): raise InvalidPointError("Generator point order is bad.") def __eq__(self, other): """Return True if the keys are identical, False otherwise. Note: for comparison, only placement on the same curve and point equality is considered, use of the same generator point is not considered. """ if isinstance(other, Public_key): return self.curve == other.curve and self.point == other.point return NotImplemented def __ne__(self, other): """Return False if the keys are identical, True otherwise.""" return not self == other def verifies(self, hash, signature): """Verify that signature is a valid signature of hash. 
Return True if the signature is valid. """ # From X9.62 J.3.1. G = self.generator n = G.order() r = signature.r s = signature.s if r < 1 or r > n - 1: return False if s < 1 or s > n - 1: return False c = numbertheory.inverse_mod(s, n) u1 = (hash * c) % n u2 = (r * c) % n if hasattr(G, "mul_add"): xy = G.mul_add(u1, self.point, u2) else: xy = u1 * G + u2 * self.point v = xy.x() % n return v == r class Private_key(object): """Private key for ECDSA.""" def __init__(self, public_key, secret_multiplier): """public_key is of class Public_key; secret_multiplier is a large integer. """ self.public_key = public_key self.secret_multiplier = secret_multiplier def __eq__(self, other): """Return True if the points are identical, False otherwise.""" if isinstance(other, Private_key): return ( self.public_key == other.public_key and self.secret_multiplier == other.secret_multiplier ) return NotImplemented def __ne__(self, other): """Return False if the points are identical, True otherwise.""" return not self == other def sign(self, hash, random_k): """Return a signature for the provided hash, using the provided random nonce. It is absolutely vital that random_k be an unpredictable number in the range [1, self.public_key.point.order()-1]. If an attacker can guess random_k, he can compute our private key from a single signature. Also, if an attacker knows a few high-order bits (or a few low-order bits) of random_k, he can compute our private key from many signatures. The generation of nonces with adequate cryptographic strength is very difficult and far beyond the scope of this comment. May raise RuntimeError, in which case retrying with a new random value k is in order. """ G = self.public_key.generator n = G.order() k = random_k % n # Fix the bit-length of the random nonce, # so that it doesn't leak via timing. # This does not change that ks = k mod n ks = k + n kt = ks + n if bit_length(ks) == bit_length(n): p1 = kt * G else: p1 = ks * G r = p1.x() % n if r == 0: raise RSZeroError("amazingly unlucky random number r") s = ( numbertheory.inverse_mod(k, n) * (hash + (self.secret_multiplier * r) % n) ) % n if s == 0: raise RSZeroError("amazingly unlucky random number s") return Signature(r, s) def int_to_string(x): """Convert integer x into a string of bytes, as per X9.62.""" assert x >= 0 if x == 0: return b("\0") result = [] while x: ordinal = x & 0xFF result.append(int2byte(ordinal)) x >>= 8 result.reverse() return b("").join(result) def string_to_int(s): """Convert a string of bytes into an integer, as per X9.62.""" result = 0 for c in s: if not isinstance(c, int): c = ord(c) result = 256 * result + c return result def digest_integer(m): """Convert an integer into a string of bytes, compute its SHA-1 hash, and convert the result to an integer.""" # # I don't expect this function to be used much. I wrote # it in order to be able to duplicate the examples # in ECDSAVS. # from hashlib import sha1 return string_to_int(sha1(int_to_string(m)).digest()) def point_is_valid(generator, x, y): """Is (x,y) a valid public key based on the specified generator?""" # These are the tests specified in X9.62. 
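    # Namely: both coordinates must lie in the range [0, p-1], the point must
    # satisfy the curve equation, and, for curves with cofactor != 1, the
    # point must have the order of the generator (n * P == INFINITY).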
n = generator.order() curve = generator.curve() p = curve.p() if not (0 <= x < p) or not (0 <= y < p): return False if not curve.contains_point(x, y): return False if ( curve.cofactor() != 1 and not n * ellipticcurve.PointJacobi(curve, x, y, 1) == ellipticcurve.INFINITY ): return False return True # secp112r1 curve _p = int(remove_whitespace("DB7C 2ABF62E3 5E668076 BEAD208B"), 16) # s = 00F50B02 8E4D696E 67687561 51752904 72783FB1 _a = int(remove_whitespace("DB7C 2ABF62E3 5E668076 BEAD2088"), 16) _b = int(remove_whitespace("659E F8BA0439 16EEDE89 11702B22"), 16) _Gx = int(remove_whitespace("09487239 995A5EE7 6B55F9C2 F098"), 16) _Gy = int(remove_whitespace("A89C E5AF8724 C0A23E0E 0FF77500"), 16) _r = int(remove_whitespace("DB7C 2ABF62E3 5E7628DF AC6561C5"), 16) _h = 1 curve_112r1 = ellipticcurve.CurveFp(_p, _a, _b, _h) generator_112r1 = ellipticcurve.PointJacobi( curve_112r1, _Gx, _Gy, 1, _r, generator=True ) # secp112r2 curve _p = int(remove_whitespace("DB7C 2ABF62E3 5E668076 BEAD208B"), 16) # s = 022757A1 114D69E 67687561 51755316 C05E0BD4 _a = int(remove_whitespace("6127 C24C05F3 8A0AAAF6 5C0EF02C"), 16) _b = int(remove_whitespace("51DE F1815DB5 ED74FCC3 4C85D709"), 16) _Gx = int(remove_whitespace("4BA30AB5 E892B4E1 649DD092 8643"), 16) _Gy = int(remove_whitespace("ADCD 46F5882E 3747DEF3 6E956E97"), 16) _r = int(remove_whitespace("36DF 0AAFD8B8 D7597CA1 0520D04B"), 16) _h = 4 curve_112r2 = ellipticcurve.CurveFp(_p, _a, _b, _h) generator_112r2 = ellipticcurve.PointJacobi( curve_112r2, _Gx, _Gy, 1, _r, generator=True ) # secp128r1 curve _p = int(remove_whitespace("FFFFFFFD FFFFFFFF FFFFFFFF FFFFFFFF"), 16) # S = 000E0D4D 69E6768 75615175 0CC03A44 73D03679 # a and b are mod p, so a is equal to p-3, or simply -3 # _a = -3 _b = int(remove_whitespace("E87579C1 1079F43D D824993C 2CEE5ED3"), 16) _Gx = int(remove_whitespace("161FF752 8B899B2D 0C28607C A52C5B86"), 16) _Gy = int(remove_whitespace("CF5AC839 5BAFEB13 C02DA292 DDED7A83"), 16) _r = int(remove_whitespace("FFFFFFFE 00000000 75A30D1B 9038A115"), 16) _h = 1 curve_128r1 = ellipticcurve.CurveFp(_p, -3, _b, _h) generator_128r1 = ellipticcurve.PointJacobi( curve_128r1, _Gx, _Gy, 1, _r, generator=True ) # secp160r1 _p = int(remove_whitespace("FFFFFFFF FFFFFFFF FFFFFFFF FFFFFFFF 7FFFFFFF"), 16) # S = 1053CDE4 2C14D696 E6768756 1517533B F3F83345 # a and b are mod p, so a is equal to p-3, or simply -3 # _a = -3 _b = int(remove_whitespace("1C97BEFC 54BD7A8B 65ACF89F 81D4D4AD C565FA45"), 16) _Gx = int( remove_whitespace("4A96B568 8EF57328 46646989 68C38BB9 13CBFC82"), 16, ) _Gy = int( remove_whitespace("23A62855 3168947D 59DCC912 04235137 7AC5FB32"), 16, ) _r = int( remove_whitespace("01 00000000 00000000 0001F4C8 F927AED3 CA752257"), 16, ) _h = 1 curve_160r1 = ellipticcurve.CurveFp(_p, -3, _b, _h) generator_160r1 = ellipticcurve.PointJacobi( curve_160r1, _Gx, _Gy, 1, _r, generator=True ) # NIST Curve P-192: _p = 6277101735386680763835789423207666416083908700390324961279 _r = 6277101735386680763835789423176059013767194773182842284081 # s = 0x3045ae6fc8422f64ed579528d38120eae12196d5L # c = 0x3099d2bbbfcb2538542dcd5fb078b6ef5f3d6fe2c745de65L _b = int( remove_whitespace( """ 64210519 E59C80E7 0FA7E9AB 72243049 FEB8DEEC C146B9B1""" ), 16, ) _Gx = int( remove_whitespace( """ 188DA80E B03090F6 7CBF20EB 43A18800 F4FF0AFD 82FF1012""" ), 16, ) _Gy = int( remove_whitespace( """ 07192B95 FFC8DA78 631011ED 6B24CDD5 73F977A1 1E794811""" ), 16, ) curve_192 = ellipticcurve.CurveFp(_p, -3, _b, 1) generator_192 = ellipticcurve.PointJacobi( curve_192, _Gx, _Gy, 1, 
_r, generator=True ) # NIST Curve P-224: _p = int( remove_whitespace( """ 2695994666715063979466701508701963067355791626002630814351 0066298881""" ) ) _r = int( remove_whitespace( """ 2695994666715063979466701508701962594045780771442439172168 2722368061""" ) ) # s = 0xbd71344799d5c7fcdc45b59fa3b9ab8f6a948bc5L # c = 0x5b056c7e11dd68f40469ee7f3c7a7d74f7d121116506d031218291fbL _b = int( remove_whitespace( """ B4050A85 0C04B3AB F5413256 5044B0B7 D7BFD8BA 270B3943 2355FFB4""" ), 16, ) _Gx = int( remove_whitespace( """ B70E0CBD 6BB4BF7F 321390B9 4A03C1D3 56C21122 343280D6 115C1D21""" ), 16, ) _Gy = int( remove_whitespace( """ BD376388 B5F723FB 4C22DFE6 CD4375A0 5A074764 44D58199 85007E34""" ), 16, ) curve_224 = ellipticcurve.CurveFp(_p, -3, _b, 1) generator_224 = ellipticcurve.PointJacobi( curve_224, _Gx, _Gy, 1, _r, generator=True ) # NIST Curve P-256: _p = int( remove_whitespace( """ 1157920892103562487626974469494075735300861434152903141955 33631308867097853951""" ) ) _r = int( remove_whitespace( """ 115792089210356248762697446949407573529996955224135760342 422259061068512044369""" ) ) # s = 0xc49d360886e704936a6678e1139d26b7819f7e90L # c = 0x7efba1662985be9403cb055c75d4f7e0ce8d84a9c5114abcaf3177680104fa0dL _b = int( remove_whitespace( """ 5AC635D8 AA3A93E7 B3EBBD55 769886BC 651D06B0 CC53B0F6 3BCE3C3E 27D2604B""" ), 16, ) _Gx = int( remove_whitespace( """ 6B17D1F2 E12C4247 F8BCE6E5 63A440F2 77037D81 2DEB33A0 F4A13945 D898C296""" ), 16, ) _Gy = int( remove_whitespace( """ 4FE342E2 FE1A7F9B 8EE7EB4A 7C0F9E16 2BCE3357 6B315ECE CBB64068 37BF51F5""" ), 16, ) curve_256 = ellipticcurve.CurveFp(_p, -3, _b, 1) generator_256 = ellipticcurve.PointJacobi( curve_256, _Gx, _Gy, 1, _r, generator=True ) # NIST Curve P-384: _p = int( remove_whitespace( """ 3940200619639447921227904010014361380507973927046544666794 8293404245721771496870329047266088258938001861606973112319""" ) ) _r = int( remove_whitespace( """ 3940200619639447921227904010014361380507973927046544666794 6905279627659399113263569398956308152294913554433653942643""" ) ) # s = 0xa335926aa319a27a1d00896a6773a4827acdac73L # c = int(remove_whitespace( # """ # 79d1e655 f868f02f ff48dcde e14151dd b80643c1 406d0ca1 # 0dfe6fc5 2009540a 495e8042 ea5f744f 6e184667 cc722483""" # ), 16) _b = int( remove_whitespace( """ B3312FA7 E23EE7E4 988E056B E3F82D19 181D9C6E FE814112 0314088F 5013875A C656398D 8A2ED19D 2A85C8ED D3EC2AEF""" ), 16, ) _Gx = int( remove_whitespace( """ AA87CA22 BE8B0537 8EB1C71E F320AD74 6E1D3B62 8BA79B98 59F741E0 82542A38 5502F25D BF55296C 3A545E38 72760AB7""" ), 16, ) _Gy = int( remove_whitespace( """ 3617DE4A 96262C6F 5D9E98BF 9292DC29 F8F41DBD 289A147C E9DA3113 B5F0B8C0 0A60B1CE 1D7E819D 7A431D7C 90EA0E5F""" ), 16, ) curve_384 = ellipticcurve.CurveFp(_p, -3, _b, 1) generator_384 = ellipticcurve.PointJacobi( curve_384, _Gx, _Gy, 1, _r, generator=True ) # NIST Curve P-521: _p = int( "686479766013060971498190079908139321726943530014330540939" "446345918554318339765605212255964066145455497729631139148" "0858037121987999716643812574028291115057151" ) _r = int( "686479766013060971498190079908139321726943530014330540939" "446345918554318339765539424505774633321719753296399637136" "3321113864768612440380340372808892707005449" ) # s = 0xd09e8800291cb85396cc6717393284aaa0da64baL # c = int(remove_whitespace( # """ # 0b4 8bfa5f42 0a349495 39d2bdfc 264eeeeb 077688e4 # 4fbf0ad8 f6d0edb3 7bd6b533 28100051 8e19f1b9 ffbe0fe9 # ed8a3c22 00b8f875 e523868c 70c1e5bf 55bad637""" # ), 16) _b = int( remove_whitespace( """ 051 953EB961 8E1C9A1F 929A21A0 
B68540EE A2DA725B 99B315F3 B8B48991 8EF109E1 56193951 EC7E937B 1652C0BD 3BB1BF07 3573DF88 3D2C34F1 EF451FD4 6B503F00""" ), 16, ) _Gx = int( remove_whitespace( """ C6 858E06B7 0404E9CD 9E3ECB66 2395B442 9C648139 053FB521 F828AF60 6B4D3DBA A14B5E77 EFE75928 FE1DC127 A2FFA8DE 3348B3C1 856A429B F97E7E31 C2E5BD66""" ), 16, ) _Gy = int( remove_whitespace( """ 118 39296A78 9A3BC004 5C8A5FB4 2C7D1BD9 98F54449 579B4468 17AFBD17 273E662C 97EE7299 5EF42640 C550B901 3FAD0761 353C7086 A272C240 88BE9476 9FD16650""" ), 16, ) curve_521 = ellipticcurve.CurveFp(_p, -3, _b, 1) generator_521 = ellipticcurve.PointJacobi( curve_521, _Gx, _Gy, 1, _r, generator=True ) # Certicom secp256-k1 _a = 0x0000000000000000000000000000000000000000000000000000000000000000 _b = 0x0000000000000000000000000000000000000000000000000000000000000007 _p = 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFC2F _Gx = 0x79BE667EF9DCBBAC55A06295CE870B07029BFCDB2DCE28D959F2815B16F81798 _Gy = 0x483ADA7726A3C4655DA4FBFC0E1108A8FD17B448A68554199C47D08FFB10D4B8 _r = 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEBAAEDCE6AF48A03BBFD25E8CD0364141 curve_secp256k1 = ellipticcurve.CurveFp(_p, _a, _b, 1) generator_secp256k1 = ellipticcurve.PointJacobi( curve_secp256k1, _Gx, _Gy, 1, _r, generator=True ) # Brainpool P-160-r1 _a = 0x340E7BE2A280EB74E2BE61BADA745D97E8F7C300 _b = 0x1E589A8595423412134FAA2DBDEC95C8D8675E58 _p = 0xE95E4A5F737059DC60DFC7AD95B3D8139515620F _Gx = 0xBED5AF16EA3F6A4F62938C4631EB5AF7BDBCDBC3 _Gy = 0x1667CB477A1A8EC338F94741669C976316DA6321 _q = 0xE95E4A5F737059DC60DF5991D45029409E60FC09 curve_brainpoolp160r1 = ellipticcurve.CurveFp(_p, _a, _b, 1) generator_brainpoolp160r1 = ellipticcurve.PointJacobi( curve_brainpoolp160r1, _Gx, _Gy, 1, _q, generator=True ) # Brainpool P-192-r1 _a = 0x6A91174076B1E0E19C39C031FE8685C1CAE040E5C69A28EF _b = 0x469A28EF7C28CCA3DC721D044F4496BCCA7EF4146FBF25C9 _p = 0xC302F41D932A36CDA7A3463093D18DB78FCE476DE1A86297 _Gx = 0xC0A0647EAAB6A48753B033C56CB0F0900A2F5C4853375FD6 _Gy = 0x14B690866ABD5BB88B5F4828C1490002E6773FA2FA299B8F _q = 0xC302F41D932A36CDA7A3462F9E9E916B5BE8F1029AC4ACC1 curve_brainpoolp192r1 = ellipticcurve.CurveFp(_p, _a, _b, 1) generator_brainpoolp192r1 = ellipticcurve.PointJacobi( curve_brainpoolp192r1, _Gx, _Gy, 1, _q, generator=True ) # Brainpool P-224-r1 _a = 0x68A5E62CA9CE6C1C299803A6C1530B514E182AD8B0042A59CAD29F43 _b = 0x2580F63CCFE44138870713B1A92369E33E2135D266DBB372386C400B _p = 0xD7C134AA264366862A18302575D1D787B09F075797DA89F57EC8C0FF _Gx = 0x0D9029AD2C7E5CF4340823B2A87DC68C9E4CE3174C1E6EFDEE12C07D _Gy = 0x58AA56F772C0726F24C6B89E4ECDAC24354B9E99CAA3F6D3761402CD _q = 0xD7C134AA264366862A18302575D0FB98D116BC4B6DDEBCA3A5A7939F curve_brainpoolp224r1 = ellipticcurve.CurveFp(_p, _a, _b, 1) generator_brainpoolp224r1 = ellipticcurve.PointJacobi( curve_brainpoolp224r1, _Gx, _Gy, 1, _q, generator=True ) # Brainpool P-256-r1 _a = 0x7D5A0975FC2C3057EEF67530417AFFE7FB8055C126DC5C6CE94A4B44F330B5D9 _b = 0x26DC5C6CE94A4B44F330B5D9BBD77CBF958416295CF7E1CE6BCCDC18FF8C07B6 _p = 0xA9FB57DBA1EEA9BC3E660A909D838D726E3BF623D52620282013481D1F6E5377 _Gx = 0x8BD2AEB9CB7E57CB2C4B482FFC81B7AFB9DE27E1E3BD23C23A4453BD9ACE3262 _Gy = 0x547EF835C3DAC4FD97F8461A14611DC9C27745132DED8E545C1D54C72F046997 _q = 0xA9FB57DBA1EEA9BC3E660A909D838D718C397AA3B561A6F7901E0E82974856A7 curve_brainpoolp256r1 = ellipticcurve.CurveFp(_p, _a, _b, 1) generator_brainpoolp256r1 = ellipticcurve.PointJacobi( curve_brainpoolp256r1, _Gx, _Gy, 1, _q, generator=True ) # Brainpool P-320-r1 _a = int( remove_whitespace( """ 
3EE30B568FBAB0F883CCEBD46D3F3BB8A2A73513F5EB79DA66190EB085FFA9 F492F375A97D860EB4""" ), 16, ) _b = int( remove_whitespace( """ 520883949DFDBC42D3AD198640688A6FE13F41349554B49ACC31DCCD884539 816F5EB4AC8FB1F1A6""" ), 16, ) _p = int( remove_whitespace( """ D35E472036BC4FB7E13C785ED201E065F98FCFA6F6F40DEF4F92B9EC7893EC 28FCD412B1F1B32E27""" ), 16, ) _Gx = int( remove_whitespace( """ 43BD7E9AFB53D8B85289BCC48EE5BFE6F20137D10A087EB6E7871E2A10A599 C710AF8D0D39E20611""" ), 16, ) _Gy = int( remove_whitespace( """ 14FDD05545EC1CC8AB4093247F77275E0743FFED117182EAA9C77877AAAC6A C7D35245D1692E8EE1""" ), 16, ) _q = int( remove_whitespace( """ D35E472036BC4FB7E13C785ED201E065F98FCFA5B68F12A32D482EC7EE8658 E98691555B44C59311""" ), 16, ) curve_brainpoolp320r1 = ellipticcurve.CurveFp(_p, _a, _b, 1) generator_brainpoolp320r1 = ellipticcurve.PointJacobi( curve_brainpoolp320r1, _Gx, _Gy, 1, _q, generator=True ) # Brainpool P-384-r1 _a = int( remove_whitespace( """ 7BC382C63D8C150C3C72080ACE05AFA0C2BEA28E4FB22787139165EFBA91F9 0F8AA5814A503AD4EB04A8C7DD22CE2826""" ), 16, ) _b = int( remove_whitespace( """ 04A8C7DD22CE28268B39B55416F0447C2FB77DE107DCD2A62E880EA53EEB62 D57CB4390295DBC9943AB78696FA504C11""" ), 16, ) _p = int( remove_whitespace( """ 8CB91E82A3386D280F5D6F7E50E641DF152F7109ED5456B412B1DA197FB711 23ACD3A729901D1A71874700133107EC53""" ), 16, ) _Gx = int( remove_whitespace( """ 1D1C64F068CF45FFA2A63A81B7C13F6B8847A3E77EF14FE3DB7FCAFE0CBD10 E8E826E03436D646AAEF87B2E247D4AF1E""" ), 16, ) _Gy = int( remove_whitespace( """ 8ABE1D7520F9C2A45CB1EB8E95CFD55262B70B29FEEC5864E19C054FF991292 80E4646217791811142820341263C5315""" ), 16, ) _q = int( remove_whitespace( """ 8CB91E82A3386D280F5D6F7E50E641DF152F7109ED5456B31F166E6CAC0425 A7CF3AB6AF6B7FC3103B883202E9046565""" ), 16, ) curve_brainpoolp384r1 = ellipticcurve.CurveFp(_p, _a, _b, 1) generator_brainpoolp384r1 = ellipticcurve.PointJacobi( curve_brainpoolp384r1, _Gx, _Gy, 1, _q, generator=True ) # Brainpool P-512-r1 _a = int( remove_whitespace( """ 7830A3318B603B89E2327145AC234CC594CBDD8D3DF91610A83441CAEA9863 BC2DED5D5AA8253AA10A2EF1C98B9AC8B57F1117A72BF2C7B9E7C1AC4D77FC94CA""" ), 16, ) _b = int( remove_whitespace( """ 3DF91610A83441CAEA9863BC2DED5D5AA8253AA10A2EF1C98B9AC8B57F1117 A72BF2C7B9E7C1AC4D77FC94CADC083E67984050B75EBAE5DD2809BD638016F723""" ), 16, ) _p = int( remove_whitespace( """ AADD9DB8DBE9C48B3FD4E6AE33C9FC07CB308DB3B3C9D20ED6639CCA703308 717D4D9B009BC66842AECDA12AE6A380E62881FF2F2D82C68528AA6056583A48F3""" ), 16, ) _Gx = int( remove_whitespace( """ 81AEE4BDD82ED9645A21322E9C4C6A9385ED9F70B5D916C1B43B62EEF4D009 8EFF3B1F78E2D0D48D50D1687B93B97D5F7C6D5047406A5E688B352209BCB9F822""" ), 16, ) _Gy = int( remove_whitespace( """ 7DDE385D566332ECC0EABFA9CF7822FDF209F70024A57B1AA000C55B881F81 11B2DCDE494A5F485E5BCA4BD88A2763AED1CA2B2FA8F0540678CD1E0F3AD80892""" ), 16, ) _q = int( remove_whitespace( """ AADD9DB8DBE9C48B3FD4E6AE33C9FC07CB308DB3B3C9D20ED6639CCA703308 70553E5C414CA92619418661197FAC10471DB1D381085DDADDB58796829CA90069""" ), 16, ) curve_brainpoolp512r1 = ellipticcurve.CurveFp(_p, _a, _b, 1) generator_brainpoolp512r1 = ellipticcurve.PointJacobi( curve_brainpoolp512r1, _Gx, _Gy, 1, _q, generator=True ) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1648836739.0 ecdsa-0.18.0/src/ecdsa/eddsa.py0000664005075200507520000001600214221640203015542 0ustar00hkariohkario"""Implementation of Edwards Digital Signature Algorithm.""" import hashlib from ._sha3 import shake_256 from . 
import ellipticcurve from ._compat import ( remove_whitespace, bit_length, bytes_to_int, int_to_bytes, compat26_str, ) # edwards25519, defined in RFC7748 _p = 2**255 - 19 _a = -1 _d = int( remove_whitespace( "370957059346694393431380835087545651895421138798432190163887855330" "85940283555" ) ) _h = 8 _Gx = int( remove_whitespace( "151122213495354007725011514095885315114540126930418572060461132" "83949847762202" ) ) _Gy = int( remove_whitespace( "463168356949264781694283940034751631413079938662562256157830336" "03165251855960" ) ) _r = 2**252 + 0x14DEF9DEA2F79CD65812631A5CF5D3ED def _sha512(data): return hashlib.new("sha512", compat26_str(data)).digest() curve_ed25519 = ellipticcurve.CurveEdTw(_p, _a, _d, _h, _sha512) generator_ed25519 = ellipticcurve.PointEdwards( curve_ed25519, _Gx, _Gy, 1, _Gx * _Gy % _p, _r, generator=True ) # edwards448, defined in RFC7748 _p = 2**448 - 2**224 - 1 _a = 1 _d = -39081 % _p _h = 4 _Gx = int( remove_whitespace( "224580040295924300187604334099896036246789641632564134246125461" "686950415467406032909029192869357953282578032075146446173674602635" "247710" ) ) _Gy = int( remove_whitespace( "298819210078481492676017930443930673437544040154080242095928241" "372331506189835876003536878655418784733982303233503462500531545062" "832660" ) ) _r = 2**446 - 0x8335DC163BB124B65129C96FDE933D8D723A70AADC873D6D54A7BB0D def _shake256(data): return shake_256(data, 114) curve_ed448 = ellipticcurve.CurveEdTw(_p, _a, _d, _h, _shake256) generator_ed448 = ellipticcurve.PointEdwards( curve_ed448, _Gx, _Gy, 1, _Gx * _Gy % _p, _r, generator=True ) class PublicKey(object): """Public key for the Edwards Digital Signature Algorithm.""" def __init__(self, generator, public_key, public_point=None): self.generator = generator self.curve = generator.curve() self.__encoded = public_key # plus one for the sign bit and round up self.baselen = (bit_length(self.curve.p()) + 1 + 7) // 8 if len(public_key) != self.baselen: raise ValueError( "Incorrect size of the public key, expected: {0} bytes".format( self.baselen ) ) if public_point: self.__point = public_point else: self.__point = ellipticcurve.PointEdwards.from_bytes( self.curve, public_key ) def __eq__(self, other): if isinstance(other, PublicKey): return ( self.curve == other.curve and self.__encoded == other.__encoded ) return NotImplemented def __ne__(self, other): return not self == other @property def point(self): return self.__point @point.setter def point(self, other): if self.__point != other: raise ValueError("Can't change the coordinates of the point") self.__point = other def public_point(self): return self.__point def public_key(self): return self.__encoded def verify(self, data, signature): """Verify a Pure EdDSA signature over data.""" data = compat26_str(data) if len(signature) != 2 * self.baselen: raise ValueError( "Invalid signature length, expected: {0} bytes".format( 2 * self.baselen ) ) R = ellipticcurve.PointEdwards.from_bytes( self.curve, signature[: self.baselen] ) S = bytes_to_int(signature[self.baselen :], "little") if S >= self.generator.order(): raise ValueError("Invalid signature") dom = bytearray() if self.curve == curve_ed448: dom = bytearray(b"SigEd448" + b"\x00\x00") k = bytes_to_int( self.curve.hash_func(dom + R.to_bytes() + self.__encoded + data), "little", ) if self.generator * S != self.__point * k + R: raise ValueError("Invalid signature") return True class PrivateKey(object): """Private key for the Edwards Digital Signature Algorithm.""" def __init__(self, generator, private_key): self.generator = 
generator self.curve = generator.curve() # plus one for the sign bit and round up self.baselen = (bit_length(self.curve.p()) + 1 + 7) // 8 if len(private_key) != self.baselen: raise ValueError( "Incorrect size of private key, expected: {0} bytes".format( self.baselen ) ) self.__private_key = bytes(private_key) self.__h = bytearray(self.curve.hash_func(private_key)) self.__public_key = None a = self.__h[: self.baselen] a = self._key_prune(a) scalar = bytes_to_int(a, "little") self.__s = scalar @property def private_key(self): return self.__private_key def __eq__(self, other): if isinstance(other, PrivateKey): return ( self.curve == other.curve and self.__private_key == other.__private_key ) return NotImplemented def __ne__(self, other): return not self == other def _key_prune(self, key): # make sure the key is not in a small subgroup h = self.curve.cofactor() if h == 4: h_log = 2 elif h == 8: h_log = 3 else: raise ValueError("Only cofactor 4 and 8 curves supported") key[0] &= ~((1 << h_log) - 1) # ensure the highest bit is set but no higher l = bit_length(self.curve.p()) if l % 8 == 0: key[-1] = 0 key[-2] |= 0x80 else: key[-1] = key[-1] & (1 << (l % 8)) - 1 | 1 << (l % 8) - 1 return key def public_key(self): """Generate the public key based on the included private key""" if self.__public_key: return self.__public_key public_point = self.generator * self.__s self.__public_key = PublicKey( self.generator, public_point.to_bytes(), public_point ) return self.__public_key def sign(self, data): """Perform a Pure EdDSA signature over data.""" data = compat26_str(data) A = self.public_key().public_key() prefix = self.__h[self.baselen :] dom = bytearray() if self.curve == curve_ed448: dom = bytearray(b"SigEd448" + b"\x00\x00") r = bytes_to_int(self.curve.hash_func(dom + prefix + data), "little") R = (self.generator * r).to_bytes() k = bytes_to_int(self.curve.hash_func(dom + R + A + data), "little") k %= self.generator.order() S = (r + k * self.__s) % self.generator.order() return R + int_to_bytes(S, self.baselen, "little") ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1657371671.0 ecdsa-0.18.0/src/ecdsa/ellipticcurve.py0000664005075200507520000015057114262276027017364 0ustar00hkariohkario#! /usr/bin/env python # -*- coding: utf-8 -*- # # Implementation of elliptic curves, for cryptographic applications. # # This module doesn't provide any way to choose a random elliptic # curve, nor to verify that an elliptic curve was chosen randomly, # because one can simply use NIST's standard curves. # # Notes from X9.62-1998 (draft): # Nomenclature: # - Q is a public key. # The "Elliptic Curve Domain Parameters" include: # - q is the "field size", which in our case equals p. # - p is a big prime. # - G is a point of prime order (5.1.1.1). # - n is the order of G (5.1.1.1). # Public-key validation (5.2.2): # - Verify that Q is not the point at infinity. # - Verify that X_Q and Y_Q are in [0,p-1]. # - Verify that Q is on the curve. # - Verify that nQ is the point at infinity. # Signature generation (5.3): # - Pick random k from [1,n-1]. # Signature checking (5.4.2): # - Verify that r and s are in [1,n-1]. # # Revision history: # 2005.12.31 - Initial version. # 2008.11.25 - Change CurveFp.is_on to contains_point. # # Written in 2005 by Peter Pearson and placed in the public domain. # Modified extensively as part of python-ecdsa. 
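
# Illustrative usage (a sketch, not part of the module's API): the toy curve
# below, y^2 = x^3 + x + 1 over GF(23), is only meant to show how CurveFp and
# Point (defined later in this file) fit together; real applications should
# use the named curves from curves.py instead.
#
#     c23 = CurveFp(23, 1, 1)
#     p1 = Point(c23, 3, 10)
#     p2 = Point(c23, 9, 7)
#     p3 = p1 + p2          # affine addition, gives the point (17, 20)
#     p4 = p1.double()      # point doubling, gives the point (7, 12)
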
from __future__ import division try: from gmpy2 import mpz GMPY = True except ImportError: # pragma: no branch try: from gmpy import mpz GMPY = True except ImportError: GMPY = False from six import python_2_unicode_compatible from . import numbertheory from ._compat import normalise_bytes, int_to_bytes, bit_length, bytes_to_int from .errors import MalformedPointError from .util import orderlen, string_to_number, number_to_string @python_2_unicode_compatible class CurveFp(object): """ :term:`Short Weierstrass Elliptic Curve ` over a prime field. """ if GMPY: # pragma: no branch def __init__(self, p, a, b, h=None): """ The curve of points satisfying y^2 = x^3 + a*x + b (mod p). h is an integer that is the cofactor of the elliptic curve domain parameters; it is the number of points satisfying the elliptic curve equation divided by the order of the base point. It is used for selection of efficient algorithm for public point verification. """ self.__p = mpz(p) self.__a = mpz(a) self.__b = mpz(b) # h is not used in calculations and it can be None, so don't use # gmpy with it self.__h = h else: # pragma: no branch def __init__(self, p, a, b, h=None): """ The curve of points satisfying y^2 = x^3 + a*x + b (mod p). h is an integer that is the cofactor of the elliptic curve domain parameters; it is the number of points satisfying the elliptic curve equation divided by the order of the base point. It is used for selection of efficient algorithm for public point verification. """ self.__p = p self.__a = a self.__b = b self.__h = h def __eq__(self, other): """Return True if other is an identical curve, False otherwise. Note: the value of the cofactor of the curve is not taken into account when comparing curves, as it's derived from the base point and intrinsic curve characteristic (but it's complex to compute), only the prime and curve parameters are considered. """ if isinstance(other, CurveFp): p = self.__p return ( self.__p == other.__p and self.__a % p == other.__a % p and self.__b % p == other.__b % p ) return NotImplemented def __ne__(self, other): """Return False if other is an identical curve, True otherwise.""" return not self == other def __hash__(self): return hash((self.__p, self.__a, self.__b)) def p(self): return self.__p def a(self): return self.__a def b(self): return self.__b def cofactor(self): return self.__h def contains_point(self, x, y): """Is the point (x,y) on this curve?""" return (y * y - ((x * x + self.__a) * x + self.__b)) % self.__p == 0 def __str__(self): return "CurveFp(p=%d, a=%d, b=%d, h=%d)" % ( self.__p, self.__a, self.__b, self.__h, ) class CurveEdTw(object): """Parameters for a Twisted Edwards Elliptic Curve""" if GMPY: # pragma: no branch def __init__(self, p, a, d, h=None, hash_func=None): """ The curve of points satisfying a*x^2 + y^2 = 1 + d*x^2*y^2 (mod p). h is the cofactor of the curve. hash_func is the hash function associated with the curve (like SHA-512 for Ed25519) """ self.__p = mpz(p) self.__a = mpz(a) self.__d = mpz(d) self.__h = h self.__hash_func = hash_func else: def __init__(self, p, a, d, h=None, hash_func=None): """ The curve of points satisfying a*x^2 + y^2 = 1 + d*x^2*y^2 (mod p). h is the cofactor of the curve. 
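        (For the Edwards curves defined in ``eddsa.py``, ``a`` is -1 for
        Ed25519 and +1 for Ed448.)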
hash_func is the hash function associated with the curve (like SHA-512 for Ed25519) """ self.__p = p self.__a = a self.__d = d self.__h = h self.__hash_func = hash_func def __eq__(self, other): """Returns True if other is an identical curve.""" if isinstance(other, CurveEdTw): p = self.__p return ( self.__p == other.__p and self.__a % p == other.__a % p and self.__d % p == other.__d % p ) return NotImplemented def __ne__(self, other): """Return False if the other is an identical curve, True otherwise.""" return not self == other def __hash__(self): return hash((self.__p, self.__a, self.__d)) def contains_point(self, x, y): """Is the point (x, y) on this curve?""" return ( self.__a * x * x + y * y - 1 - self.__d * x * x * y * y ) % self.__p == 0 def p(self): return self.__p def a(self): return self.__a def d(self): return self.__d def hash_func(self, data): return self.__hash_func(data) def cofactor(self): return self.__h def __str__(self): return "CurveEdTw(p={0}, a={1}, d={2}, h={3})".format( self.__p, self.__a, self.__d, self.__h, ) class AbstractPoint(object): """Class for common methods of elliptic curve points.""" @staticmethod def _from_raw_encoding(data, raw_encoding_length): """ Decode public point from :term:`raw encoding`. :term:`raw encoding` is the same as the :term:`uncompressed` encoding, but without the 0x04 byte at the beginning. """ # real assert, from_bytes() should not call us with different length assert len(data) == raw_encoding_length xs = data[: raw_encoding_length // 2] ys = data[raw_encoding_length // 2 :] # real assert, raw_encoding_length is calculated by multiplying an # integer by two so it will always be even assert len(xs) == raw_encoding_length // 2 assert len(ys) == raw_encoding_length // 2 coord_x = string_to_number(xs) coord_y = string_to_number(ys) return coord_x, coord_y @staticmethod def _from_compressed(data, curve): """Decode public point from compressed encoding.""" if data[:1] not in (b"\x02", b"\x03"): raise MalformedPointError("Malformed compressed point encoding") is_even = data[:1] == b"\x02" x = string_to_number(data[1:]) p = curve.p() alpha = (pow(x, 3, p) + (curve.a() * x) + curve.b()) % p try: beta = numbertheory.square_root_mod_prime(alpha, p) except numbertheory.Error as e: raise MalformedPointError( "Encoding does not correspond to a point on curve", e ) if is_even == bool(beta & 1): y = p - beta else: y = beta return x, y @classmethod def _from_hybrid(cls, data, raw_encoding_length, validate_encoding): """Decode public point from hybrid encoding.""" # real assert, from_bytes() should not call us with different types assert data[:1] in (b"\x06", b"\x07") # primarily use the uncompressed as it's easiest to handle x, y = cls._from_raw_encoding(data[1:], raw_encoding_length) # but validate if it's self-consistent if we're asked to do that if validate_encoding and ( y & 1 and data[:1] != b"\x07" or (not y & 1) and data[:1] != b"\x06" ): raise MalformedPointError("Inconsistent hybrid point encoding") return x, y @classmethod def _from_edwards(cls, curve, data): """Decode a point on an Edwards curve.""" data = bytearray(data) p = curve.p() # add 1 for the sign bit and then round up exp_len = (bit_length(p) + 1 + 7) // 8 if len(data) != exp_len: raise MalformedPointError("Point length doesn't match the curve.") x_0 = (data[-1] & 0x80) >> 7 data[-1] &= 0x80 - 1 y = bytes_to_int(data, "little") if GMPY: y = mpz(y) x2 = ( (y * y - 1) * numbertheory.inverse_mod(curve.d() * y * y - curve.a(), p) % p ) try: x = numbertheory.square_root_mod_prime(x2, 
p) except numbertheory.Error as e: raise MalformedPointError( "Encoding does not correspond to a point on curve", e ) if x % 2 != x_0: x = -x % p return x, y @classmethod def from_bytes( cls, curve, data, validate_encoding=True, valid_encodings=None ): """ Initialise the object from byte encoding of a point. The method does accept and automatically detect the type of point encoding used. It supports the :term:`raw encoding`, :term:`uncompressed`, :term:`compressed`, and :term:`hybrid` encodings. Note: generally you will want to call the ``from_bytes()`` method of either a child class, PointJacobi or Point. :param data: single point encoding of the public key :type data: :term:`bytes-like object` :param curve: the curve on which the public key is expected to lay :type curve: ~ecdsa.ellipticcurve.CurveFp :param validate_encoding: whether to verify that the encoding of the point is self-consistent, defaults to True, has effect only on ``hybrid`` encoding :type validate_encoding: bool :param valid_encodings: list of acceptable point encoding formats, supported ones are: :term:`uncompressed`, :term:`compressed`, :term:`hybrid`, and :term:`raw encoding` (specified with ``raw`` name). All formats by default (specified with ``None``). :type valid_encodings: :term:`set-like object` :raises `~ecdsa.errors.MalformedPointError`: if the public point does not lay on the curve or the encoding is invalid :return: x and y coordinates of the encoded point :rtype: tuple(int, int) """ if not valid_encodings: valid_encodings = set( ["uncompressed", "compressed", "hybrid", "raw"] ) if not all( i in set(("uncompressed", "compressed", "hybrid", "raw")) for i in valid_encodings ): raise ValueError( "Only uncompressed, compressed, hybrid or raw encoding " "supported." ) data = normalise_bytes(data) if isinstance(curve, CurveEdTw): return cls._from_edwards(curve, data) key_len = len(data) raw_encoding_length = 2 * orderlen(curve.p()) if key_len == raw_encoding_length and "raw" in valid_encodings: coord_x, coord_y = cls._from_raw_encoding( data, raw_encoding_length ) elif key_len == raw_encoding_length + 1 and ( "hybrid" in valid_encodings or "uncompressed" in valid_encodings ): if data[:1] in (b"\x06", b"\x07") and "hybrid" in valid_encodings: coord_x, coord_y = cls._from_hybrid( data, raw_encoding_length, validate_encoding ) elif data[:1] == b"\x04" and "uncompressed" in valid_encodings: coord_x, coord_y = cls._from_raw_encoding( data[1:], raw_encoding_length ) else: raise MalformedPointError( "Invalid X9.62 encoding of the public point" ) elif ( key_len == raw_encoding_length // 2 + 1 and "compressed" in valid_encodings ): coord_x, coord_y = cls._from_compressed(data, curve) else: raise MalformedPointError( "Length of string does not match lengths of " "any of the enabled ({0}) encodings of the " "curve.".format(", ".join(valid_encodings)) ) return coord_x, coord_y def _raw_encode(self): """Convert the point to the :term:`raw encoding`.""" prime = self.curve().p() x_str = number_to_string(self.x(), prime) y_str = number_to_string(self.y(), prime) return x_str + y_str def _compressed_encode(self): """Encode the point into the compressed form.""" prime = self.curve().p() x_str = number_to_string(self.x(), prime) if self.y() & 1: return b"\x03" + x_str return b"\x02" + x_str def _hybrid_encode(self): """Encode the point into the hybrid form.""" raw_enc = self._raw_encode() if self.y() & 1: return b"\x07" + raw_enc return b"\x06" + raw_enc def _edwards_encode(self): """Encode the point according to RFC8032 
encoding.""" self.scale() x, y, p = self.x(), self.y(), self.curve().p() # add 1 for the sign bit and then round up enc_len = (bit_length(p) + 1 + 7) // 8 y_str = int_to_bytes(y, enc_len, "little") if x % 2: y_str[-1] |= 0x80 return y_str def to_bytes(self, encoding="raw"): """ Convert the point to a byte string. The method by default uses the :term:`raw encoding` (specified by `encoding="raw"`. It can also output points in :term:`uncompressed`, :term:`compressed`, and :term:`hybrid` formats. For points on Edwards curves `encoding` is ignored and only the encoding defined in RFC 8032 is supported. :return: :term:`raw encoding` of a public on the curve :rtype: bytes """ assert encoding in ("raw", "uncompressed", "compressed", "hybrid") curve = self.curve() if isinstance(curve, CurveEdTw): return self._edwards_encode() elif encoding == "raw": return self._raw_encode() elif encoding == "uncompressed": return b"\x04" + self._raw_encode() elif encoding == "hybrid": return self._hybrid_encode() else: return self._compressed_encode() @staticmethod def _naf(mult): """Calculate non-adjacent form of number.""" ret = [] while mult: if mult % 2: nd = mult % 4 if nd >= 2: nd -= 4 ret.append(nd) mult -= nd else: ret.append(0) mult //= 2 return ret class PointJacobi(AbstractPoint): """ Point on a short Weierstrass elliptic curve. Uses Jacobi coordinates. In Jacobian coordinates, there are three parameters, X, Y and Z. They correspond to affine parameters 'x' and 'y' like so: x = X / Z² y = Y / Z³ """ def __init__(self, curve, x, y, z, order=None, generator=False): """ Initialise a point that uses Jacobi representation internally. :param CurveFp curve: curve on which the point resides :param int x: the X parameter of Jacobi representation (equal to x when converting from affine coordinates :param int y: the Y parameter of Jacobi representation (equal to y when converting from affine coordinates :param int z: the Z parameter of Jacobi representation (equal to 1 when converting from affine coordinates :param int order: the point order, must be non zero when using generator=True :param bool generator: the point provided is a curve generator, as such, it will be commonly used with scalar multiplication. This will cause to precompute multiplication table generation for it """ super(PointJacobi, self).__init__() self.__curve = curve if GMPY: # pragma: no branch self.__coords = (mpz(x), mpz(y), mpz(z)) self.__order = order and mpz(order) else: # pragma: no branch self.__coords = (x, y, z) self.__order = order self.__generator = generator self.__precompute = [] @classmethod def from_bytes( cls, curve, data, validate_encoding=True, valid_encodings=None, order=None, generator=False, ): """ Initialise the object from byte encoding of a point. The method does accept and automatically detect the type of point encoding used. It supports the :term:`raw encoding`, :term:`uncompressed`, :term:`compressed`, and :term:`hybrid` encodings. :param data: single point encoding of the public key :type data: :term:`bytes-like object` :param curve: the curve on which the public key is expected to lay :type curve: ~ecdsa.ellipticcurve.CurveFp :param validate_encoding: whether to verify that the encoding of the point is self-consistent, defaults to True, has effect only on ``hybrid`` encoding :type validate_encoding: bool :param valid_encodings: list of acceptable point encoding formats, supported ones are: :term:`uncompressed`, :term:`compressed`, :term:`hybrid`, and :term:`raw encoding` (specified with ``raw`` name). 
All formats by default (specified with ``None``). :type valid_encodings: :term:`set-like object` :param int order: the point order, must be non zero when using generator=True :param bool generator: the point provided is a curve generator, as such, it will be commonly used with scalar multiplication. This will cause to precompute multiplication table generation for it :raises `~ecdsa.errors.MalformedPointError`: if the public point does not lay on the curve or the encoding is invalid :return: Point on curve :rtype: PointJacobi """ coord_x, coord_y = super(PointJacobi, cls).from_bytes( curve, data, validate_encoding, valid_encodings ) return PointJacobi(curve, coord_x, coord_y, 1, order, generator) def _maybe_precompute(self): if not self.__generator or self.__precompute: return # since this code will execute just once, and it's fully deterministic, # depend on atomicity of the last assignment to switch from empty # self.__precompute to filled one and just ignore the unlikely # situation when two threads execute it at the same time (as it won't # lead to inconsistent __precompute) order = self.__order assert order precompute = [] i = 1 order *= 2 coord_x, coord_y, coord_z = self.__coords doubler = PointJacobi(self.__curve, coord_x, coord_y, coord_z, order) order *= 2 precompute.append((doubler.x(), doubler.y())) while i < order: i *= 2 doubler = doubler.double().scale() precompute.append((doubler.x(), doubler.y())) self.__precompute = precompute def __getstate__(self): # while this code can execute at the same time as _maybe_precompute() # is updating the __precompute or scale() is updating the __coords, # there is no requirement for consistency between __coords and # __precompute state = self.__dict__.copy() return state def __setstate__(self, state): self.__dict__.update(state) def __eq__(self, other): """Compare for equality two points with each-other. Note: only points that lay on the same curve can be equal. """ x1, y1, z1 = self.__coords if other is INFINITY: return not y1 or not z1 if isinstance(other, Point): x2, y2, z2 = other.x(), other.y(), 1 elif isinstance(other, PointJacobi): x2, y2, z2 = other.__coords else: return NotImplemented if self.__curve != other.curve(): return False p = self.__curve.p() zz1 = z1 * z1 % p zz2 = z2 * z2 % p # compare the fractions by bringing them to the same denominator # depend on short-circuit to save 4 multiplications in case of # inequality return (x1 * zz2 - x2 * zz1) % p == 0 and ( y1 * zz2 * z2 - y2 * zz1 * z1 ) % p == 0 def __ne__(self, other): """Compare for inequality two points with each-other.""" return not self == other def order(self): """Return the order of the point. None if it is undefined. """ return self.__order def curve(self): """Return curve over which the point is defined.""" return self.__curve def x(self): """ Return affine x coordinate. This method should be used only when the 'y' coordinate is not needed. It's computationally more efficient to use `to_affine()` and then call x() and y() on the returned instance. Or call `scale()` and then x() and y() on the returned instance. """ x, _, z = self.__coords if z == 1: return x p = self.__curve.p() z = numbertheory.inverse_mod(z, p) return x * z**2 % p def y(self): """ Return affine y coordinate. This method should be used only when the 'x' coordinate is not needed. It's computationally more efficient to use `to_affine()` and then call x() and y() on the returned instance. Or call `scale()` and then x() and y() on the returned instance. 
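        (Each call otherwise performs its own modular inversion of the z
        coordinate, which is the costly part of the conversion.)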
""" _, y, z = self.__coords if z == 1: return y p = self.__curve.p() z = numbertheory.inverse_mod(z, p) return y * z**3 % p def scale(self): """ Return point scaled so that z == 1. Modifies point in place, returns self. """ x, y, z = self.__coords if z == 1: return self # scaling is deterministic, so even if two threads execute the below # code at the same time, they will set __coords to the same value p = self.__curve.p() z_inv = numbertheory.inverse_mod(z, p) zz_inv = z_inv * z_inv % p x = x * zz_inv % p y = y * zz_inv * z_inv % p self.__coords = (x, y, 1) return self def to_affine(self): """Return point in affine form.""" _, y, z = self.__coords if not y or not z: return INFINITY self.scale() x, y, z = self.__coords return Point(self.__curve, x, y, self.__order) @staticmethod def from_affine(point, generator=False): """Create from an affine point. :param bool generator: set to True to make the point to precalculate multiplication table - useful for public point when verifying many signatures (around 100 or so) or for generator points of a curve. """ return PointJacobi( point.curve(), point.x(), point.y(), 1, point.order(), generator ) # please note that all the methods that use the equations from # hyperelliptic # are formatted in a way to maximise performance. # Things that make code faster: multiplying instead of taking to the power # (`xx = x * x; xxxx = xx * xx % p` is faster than `xxxx = x**4 % p` and # `pow(x, 4, p)`), # multiple assignments at the same time (`x1, x2 = self.x1, self.x2` is # faster than `x1 = self.x1; x2 = self.x2`), # similarly, sometimes the `% p` is skipped if it makes the calculation # faster and the result of calculation is later reduced modulo `p` def _double_with_z_1(self, X1, Y1, p, a): """Add a point to itself with z == 1.""" # after: # http://hyperelliptic.org/EFD/g1p/auto-shortw-jacobian.html#doubling-mdbl-2007-bl XX, YY = X1 * X1 % p, Y1 * Y1 % p if not YY: return 0, 0, 1 YYYY = YY * YY % p S = 2 * ((X1 + YY) ** 2 - XX - YYYY) % p M = 3 * XX + a T = (M * M - 2 * S) % p # X3 = T Y3 = (M * (S - T) - 8 * YYYY) % p Z3 = 2 * Y1 % p return T, Y3, Z3 def _double(self, X1, Y1, Z1, p, a): """Add a point to itself, arbitrary z.""" if Z1 == 1: return self._double_with_z_1(X1, Y1, p, a) if not Y1 or not Z1: return 0, 0, 1 # after: # http://hyperelliptic.org/EFD/g1p/auto-shortw-jacobian.html#doubling-dbl-2007-bl XX, YY = X1 * X1 % p, Y1 * Y1 % p if not YY: return 0, 0, 1 YYYY = YY * YY % p ZZ = Z1 * Z1 % p S = 2 * ((X1 + YY) ** 2 - XX - YYYY) % p M = (3 * XX + a * ZZ * ZZ) % p T = (M * M - 2 * S) % p # X3 = T Y3 = (M * (S - T) - 8 * YYYY) % p Z3 = ((Y1 + Z1) ** 2 - YY - ZZ) % p return T, Y3, Z3 def double(self): """Add a point to itself.""" X1, Y1, Z1 = self.__coords if not Y1: return INFINITY p, a = self.__curve.p(), self.__curve.a() X3, Y3, Z3 = self._double(X1, Y1, Z1, p, a) if not Y3 or not Z3: return INFINITY return PointJacobi(self.__curve, X3, Y3, Z3, self.__order) def _add_with_z_1(self, X1, Y1, X2, Y2, p): """add points when both Z1 and Z2 equal 1""" # after: # http://hyperelliptic.org/EFD/g1p/auto-shortw-jacobian.html#addition-mmadd-2007-bl H = X2 - X1 HH = H * H I = 4 * HH % p J = H * I r = 2 * (Y2 - Y1) if not H and not r: return self._double_with_z_1(X1, Y1, p, self.__curve.a()) V = X1 * I X3 = (r**2 - J - 2 * V) % p Y3 = (r * (V - X3) - 2 * Y1 * J) % p Z3 = 2 * H % p return X3, Y3, Z3 def _add_with_z_eq(self, X1, Y1, Z1, X2, Y2, p): """add points when Z1 == Z2""" # after: # 
http://hyperelliptic.org/EFD/g1p/auto-shortw-jacobian.html#addition-zadd-2007-m A = (X2 - X1) ** 2 % p B = X1 * A % p C = X2 * A D = (Y2 - Y1) ** 2 % p if not A and not D: return self._double(X1, Y1, Z1, p, self.__curve.a()) X3 = (D - B - C) % p Y3 = ((Y2 - Y1) * (B - X3) - Y1 * (C - B)) % p Z3 = Z1 * (X2 - X1) % p return X3, Y3, Z3 def _add_with_z2_1(self, X1, Y1, Z1, X2, Y2, p): """add points when Z2 == 1""" # after: # http://hyperelliptic.org/EFD/g1p/auto-shortw-jacobian.html#addition-madd-2007-bl Z1Z1 = Z1 * Z1 % p U2, S2 = X2 * Z1Z1 % p, Y2 * Z1 * Z1Z1 % p H = (U2 - X1) % p HH = H * H % p I = 4 * HH % p J = H * I r = 2 * (S2 - Y1) % p if not r and not H: return self._double_with_z_1(X2, Y2, p, self.__curve.a()) V = X1 * I X3 = (r * r - J - 2 * V) % p Y3 = (r * (V - X3) - 2 * Y1 * J) % p Z3 = ((Z1 + H) ** 2 - Z1Z1 - HH) % p return X3, Y3, Z3 def _add_with_z_ne(self, X1, Y1, Z1, X2, Y2, Z2, p): """add points with arbitrary z""" # after: # http://hyperelliptic.org/EFD/g1p/auto-shortw-jacobian.html#addition-add-2007-bl Z1Z1 = Z1 * Z1 % p Z2Z2 = Z2 * Z2 % p U1 = X1 * Z2Z2 % p U2 = X2 * Z1Z1 % p S1 = Y1 * Z2 * Z2Z2 % p S2 = Y2 * Z1 * Z1Z1 % p H = U2 - U1 I = 4 * H * H % p J = H * I % p r = 2 * (S2 - S1) % p if not H and not r: return self._double(X1, Y1, Z1, p, self.__curve.a()) V = U1 * I X3 = (r * r - J - 2 * V) % p Y3 = (r * (V - X3) - 2 * S1 * J) % p Z3 = ((Z1 + Z2) ** 2 - Z1Z1 - Z2Z2) * H % p return X3, Y3, Z3 def __radd__(self, other): """Add other to self.""" return self + other def _add(self, X1, Y1, Z1, X2, Y2, Z2, p): """add two points, select fastest method.""" if not Y1 or not Z1: return X2, Y2, Z2 if not Y2 or not Z2: return X1, Y1, Z1 if Z1 == Z2: if Z1 == 1: return self._add_with_z_1(X1, Y1, X2, Y2, p) return self._add_with_z_eq(X1, Y1, Z1, X2, Y2, p) if Z1 == 1: return self._add_with_z2_1(X2, Y2, Z2, X1, Y1, p) if Z2 == 1: return self._add_with_z2_1(X1, Y1, Z1, X2, Y2, p) return self._add_with_z_ne(X1, Y1, Z1, X2, Y2, Z2, p) def __add__(self, other): """Add two points on elliptic curve.""" if self == INFINITY: return other if other == INFINITY: return self if isinstance(other, Point): other = PointJacobi.from_affine(other) if self.__curve != other.__curve: raise ValueError("The other point is on different curve") p = self.__curve.p() X1, Y1, Z1 = self.__coords X2, Y2, Z2 = other.__coords X3, Y3, Z3 = self._add(X1, Y1, Z1, X2, Y2, Z2, p) if not Y3 or not Z3: return INFINITY return PointJacobi(self.__curve, X3, Y3, Z3, self.__order) def __rmul__(self, other): """Multiply point by an integer.""" return self * other def _mul_precompute(self, other): """Multiply point by integer with precomputation table.""" X3, Y3, Z3, p = 0, 0, 1, self.__curve.p() _add = self._add for X2, Y2 in self.__precompute: if other % 2: if other % 4 >= 2: other = (other + 1) // 2 X3, Y3, Z3 = _add(X3, Y3, Z3, X2, -Y2, 1, p) else: other = (other - 1) // 2 X3, Y3, Z3 = _add(X3, Y3, Z3, X2, Y2, 1, p) else: other //= 2 if not Y3 or not Z3: return INFINITY return PointJacobi(self.__curve, X3, Y3, Z3, self.__order) def __mul__(self, other): """Multiply point by an integer.""" if not self.__coords[1] or not other: return INFINITY if other == 1: return self if self.__order: # order*2 as a protection for Minerva other = other % (self.__order * 2) self._maybe_precompute() if self.__precompute: return self._mul_precompute(other) self = self.scale() X2, Y2, _ = self.__coords X3, Y3, Z3 = 0, 0, 1 p, a = self.__curve.p(), self.__curve.a() _double = self._double _add = self._add # since adding points when at least one 
of them is scaled # is quicker, reverse the NAF order for i in reversed(self._naf(other)): X3, Y3, Z3 = _double(X3, Y3, Z3, p, a) if i < 0: X3, Y3, Z3 = _add(X3, Y3, Z3, X2, -Y2, 1, p) elif i > 0: X3, Y3, Z3 = _add(X3, Y3, Z3, X2, Y2, 1, p) if not Y3 or not Z3: return INFINITY return PointJacobi(self.__curve, X3, Y3, Z3, self.__order) def mul_add(self, self_mul, other, other_mul): """ Do two multiplications at the same time, add results. calculates self*self_mul + other*other_mul """ if other == INFINITY or other_mul == 0: return self * self_mul if self_mul == 0: return other * other_mul if not isinstance(other, PointJacobi): other = PointJacobi.from_affine(other) # when the points have precomputed answers, then multiplying them alone # is faster (as it uses NAF and no point doublings) self._maybe_precompute() other._maybe_precompute() if self.__precompute and other.__precompute: return self * self_mul + other * other_mul if self.__order: self_mul = self_mul % self.__order other_mul = other_mul % self.__order # (X3, Y3, Z3) is the accumulator X3, Y3, Z3 = 0, 0, 1 p, a = self.__curve.p(), self.__curve.a() # as we have 6 unique points to work with, we can't scale all of them, # but do scale the ones that are used most often self.scale() X1, Y1, Z1 = self.__coords other.scale() X2, Y2, Z2 = other.__coords _double = self._double _add = self._add # with NAF we have 3 options: no add, subtract, add # so with 2 points, we have 9 combinations: # 0, -A, +A, -B, -A-B, +A-B, +B, -A+B, +A+B # so we need 4 combined points: mAmB_X, mAmB_Y, mAmB_Z = _add(X1, -Y1, Z1, X2, -Y2, Z2, p) pAmB_X, pAmB_Y, pAmB_Z = _add(X1, Y1, Z1, X2, -Y2, Z2, p) mApB_X, mApB_Y, mApB_Z = _add(X1, -Y1, Z1, X2, Y2, Z2, p) pApB_X, pApB_Y, pApB_Z = _add(X1, Y1, Z1, X2, Y2, Z2, p) # when the self and other sum to infinity, we need to add them # one by one to get correct result but as that's very unlikely to # happen in regular operation, we don't need to optimise this case if not pApB_Y or not pApB_Z: return self * self_mul + other * other_mul # gmp object creation has cumulatively higher overhead than the # speedup we get from calculating the NAF using gmp so ensure use # of int() self_naf = list(reversed(self._naf(int(self_mul)))) other_naf = list(reversed(self._naf(int(other_mul)))) # ensure that the lists are the same length (zip() will truncate # longer one otherwise) if len(self_naf) < len(other_naf): self_naf = [0] * (len(other_naf) - len(self_naf)) + self_naf elif len(self_naf) > len(other_naf): other_naf = [0] * (len(self_naf) - len(other_naf)) + other_naf for A, B in zip(self_naf, other_naf): X3, Y3, Z3 = _double(X3, Y3, Z3, p, a) # conditions ordered from most to least likely if A == 0: if B == 0: pass elif B < 0: X3, Y3, Z3 = _add(X3, Y3, Z3, X2, -Y2, Z2, p) else: assert B > 0 X3, Y3, Z3 = _add(X3, Y3, Z3, X2, Y2, Z2, p) elif A < 0: if B == 0: X3, Y3, Z3 = _add(X3, Y3, Z3, X1, -Y1, Z1, p) elif B < 0: X3, Y3, Z3 = _add(X3, Y3, Z3, mAmB_X, mAmB_Y, mAmB_Z, p) else: assert B > 0 X3, Y3, Z3 = _add(X3, Y3, Z3, mApB_X, mApB_Y, mApB_Z, p) else: assert A > 0 if B == 0: X3, Y3, Z3 = _add(X3, Y3, Z3, X1, Y1, Z1, p) elif B < 0: X3, Y3, Z3 = _add(X3, Y3, Z3, pAmB_X, pAmB_Y, pAmB_Z, p) else: assert B > 0 X3, Y3, Z3 = _add(X3, Y3, Z3, pApB_X, pApB_Y, pApB_Z, p) if not Y3 or not Z3: return INFINITY return PointJacobi(self.__curve, X3, Y3, Z3, self.__order) def __neg__(self): """Return negated point.""" x, y, z = self.__coords return PointJacobi(self.__curve, x, -y, z, self.__order) class Point(AbstractPoint): """A point on a short 
Weierstrass elliptic curve. Altering x and y is forbidden, but they can be read by the x() and y() methods.""" def __init__(self, curve, x, y, order=None): """curve, x, y, order; order (optional) is the order of this point.""" super(Point, self).__init__() self.__curve = curve if GMPY: self.__x = x and mpz(x) self.__y = y and mpz(y) self.__order = order and mpz(order) else: self.__x = x self.__y = y self.__order = order # self.curve is allowed to be None only for INFINITY: if self.__curve: assert self.__curve.contains_point(x, y) # for curves with cofactor 1, all points that are on the curve are # scalar multiples of the base point, so performing multiplication is # not necessary to verify that. See Section 3.2.2.1 of SEC 1 v2 if curve and curve.cofactor() != 1 and order: assert self * order == INFINITY @classmethod def from_bytes( cls, curve, data, validate_encoding=True, valid_encodings=None, order=None, ): """ Initialise the object from byte encoding of a point. The method does accept and automatically detect the type of point encoding used. It supports the :term:`raw encoding`, :term:`uncompressed`, :term:`compressed`, and :term:`hybrid` encodings. :param data: single point encoding of the public key :type data: :term:`bytes-like object` :param curve: the curve on which the public key is expected to lay :type curve: ~ecdsa.ellipticcurve.CurveFp :param validate_encoding: whether to verify that the encoding of the point is self-consistent, defaults to True, has effect only on ``hybrid`` encoding :type validate_encoding: bool :param valid_encodings: list of acceptable point encoding formats, supported ones are: :term:`uncompressed`, :term:`compressed`, :term:`hybrid`, and :term:`raw encoding` (specified with ``raw`` name). All formats by default (specified with ``None``). :type valid_encodings: :term:`set-like object` :param int order: the point order, must be non zero when using generator=True :raises `~ecdsa.errors.MalformedPointError`: if the public point does not lay on the curve or the encoding is invalid :return: Point on curve :rtype: Point """ coord_x, coord_y = super(Point, cls).from_bytes( curve, data, validate_encoding, valid_encodings ) return Point(curve, coord_x, coord_y, order) def __eq__(self, other): """Return True if the points are identical, False otherwise. Note: only points that lay on the same curve can be equal. 
""" if isinstance(other, Point): return ( self.__curve == other.__curve and self.__x == other.__x and self.__y == other.__y ) return NotImplemented def __ne__(self, other): """Returns False if points are identical, True otherwise.""" return not self == other def __neg__(self): return Point(self.__curve, self.__x, self.__curve.p() - self.__y) def __add__(self, other): """Add one point to another point.""" # X9.62 B.3: if not isinstance(other, Point): return NotImplemented if other == INFINITY: return self if self == INFINITY: return other assert self.__curve == other.__curve if self.__x == other.__x: if (self.__y + other.__y) % self.__curve.p() == 0: return INFINITY else: return self.double() p = self.__curve.p() l = ( (other.__y - self.__y) * numbertheory.inverse_mod(other.__x - self.__x, p) ) % p x3 = (l * l - self.__x - other.__x) % p y3 = (l * (self.__x - x3) - self.__y) % p return Point(self.__curve, x3, y3) def __mul__(self, other): """Multiply a point by an integer.""" def leftmost_bit(x): assert x > 0 result = 1 while result <= x: result = 2 * result return result // 2 e = other if e == 0 or (self.__order and e % self.__order == 0): return INFINITY if self == INFINITY: return INFINITY if e < 0: return (-self) * (-e) # From X9.62 D.3.2: e3 = 3 * e negative_self = Point(self.__curve, self.__x, -self.__y, self.__order) i = leftmost_bit(e3) // 2 result = self # print_("Multiplying %s by %d (e3 = %d):" % (self, other, e3)) while i > 1: result = result.double() if (e3 & i) != 0 and (e & i) == 0: result = result + self if (e3 & i) == 0 and (e & i) != 0: result = result + negative_self # print_(". . . i = %d, result = %s" % ( i, result )) i = i // 2 return result def __rmul__(self, other): """Multiply a point by an integer.""" return self * other def __str__(self): if self == INFINITY: return "infinity" return "(%d,%d)" % (self.__x, self.__y) def double(self): """Return a new point that is twice the old.""" if self == INFINITY: return INFINITY # X9.62 B.3: p = self.__curve.p() a = self.__curve.a() l = ( (3 * self.__x * self.__x + a) * numbertheory.inverse_mod(2 * self.__y, p) ) % p x3 = (l * l - 2 * self.__x) % p y3 = (l * (self.__x - x3) - self.__y) % p return Point(self.__curve, x3, y3) def x(self): return self.__x def y(self): return self.__y def curve(self): return self.__curve def order(self): return self.__order class PointEdwards(AbstractPoint): """Point on Twisted Edwards curve. Internally represents the coordinates on the curve using four parameters, X, Y, Z, T. They correspond to affine parameters 'x' and 'y' like so: x = X / Z y = Y / Z x*y = T / Z """ def __init__(self, curve, x, y, z, t, order=None, generator=False): """ Initialise a point that uses the extended coordinates internally. """ super(PointEdwards, self).__init__() self.__curve = curve if GMPY: # pragma: no branch self.__coords = (mpz(x), mpz(y), mpz(z), mpz(t)) self.__order = order and mpz(order) else: # pragma: no branch self.__coords = (x, y, z, t) self.__order = order self.__generator = generator self.__precompute = [] @classmethod def from_bytes( cls, curve, data, validate_encoding=None, valid_encodings=None, order=None, generator=False, ): """ Initialise the object from byte encoding of a point. `validate_encoding` and `valid_encodings` are provided for compatibility with Weierstrass curves, they are ignored for Edwards points. 
:param data: single point encoding of the public key :type data: :term:`bytes-like object` :param curve: the curve on which the public key is expected to lay :type curve: ecdsa.ellipticcurve.CurveEdTw :param None validate_encoding: Ignored, encoding is always validated :param None valid_encodings: Ignored, there is just one encoding supported :param int order: the point order, must be non zero when using generator=True :param bool generator: Flag to mark the point as a curve generator, this will cause the library to pre-compute some values to make repeated usages of the point much faster :raises `~ecdsa.errors.MalformedPointError`: if the public point does not lay on the curve or the encoding is invalid :return: Initialised point on an Edwards curve :rtype: PointEdwards """ coord_x, coord_y = super(PointEdwards, cls).from_bytes( curve, data, validate_encoding, valid_encodings ) return PointEdwards( curve, coord_x, coord_y, 1, coord_x * coord_y, order, generator ) def _maybe_precompute(self): if not self.__generator or self.__precompute: return self.__precompute # since this code will execute just once, and it's fully deterministic, # depend on atomicity of the last assignment to switch from empty # self.__precompute to filled one and just ignore the unlikely # situation when two threads execute it at the same time (as it won't # lead to inconsistent __precompute) order = self.__order assert order precompute = [] i = 1 order *= 2 coord_x, coord_y, coord_z, coord_t = self.__coords prime = self.__curve.p() doubler = PointEdwards( self.__curve, coord_x, coord_y, coord_z, coord_t, order ) # for "protection" against Minerva we need 1 or 2 more bits depending # on order bit size, but it's easier to just calculate one # point more always order *= 4 while i < order: doubler = doubler.scale() coord_x, coord_y = doubler.x(), doubler.y() coord_t = coord_x * coord_y % prime precompute.append((coord_x, coord_y, coord_t)) i *= 2 doubler = doubler.double() self.__precompute = precompute return self.__precompute def x(self): """Return affine x coordinate.""" X1, _, Z1, _ = self.__coords if Z1 == 1: return X1 p = self.__curve.p() z_inv = numbertheory.inverse_mod(Z1, p) return X1 * z_inv % p def y(self): """Return affine y coordinate.""" _, Y1, Z1, _ = self.__coords if Z1 == 1: return Y1 p = self.__curve.p() z_inv = numbertheory.inverse_mod(Z1, p) return Y1 * z_inv % p def curve(self): """Return the curve of the point.""" return self.__curve def order(self): return self.__order def scale(self): """ Return point scaled so that z == 1. Modifies point in place, returns self. """ X1, Y1, Z1, _ = self.__coords if Z1 == 1: return self p = self.__curve.p() z_inv = numbertheory.inverse_mod(Z1, p) x = X1 * z_inv % p y = Y1 * z_inv % p t = x * y % p self.__coords = (x, y, 1, t) return self def __eq__(self, other): """Compare for equality two points with each-other. Note: only points on the same curve can be equal. 
""" x1, y1, z1, t1 = self.__coords if other is INFINITY: return not x1 or not t1 if isinstance(other, PointEdwards): x2, y2, z2, t2 = other.__coords else: return NotImplemented if self.__curve != other.curve(): return False p = self.__curve.p() # cross multiply to eliminate divisions xn1 = x1 * z2 % p xn2 = x2 * z1 % p yn1 = y1 * z2 % p yn2 = y2 * z1 % p return xn1 == xn2 and yn1 == yn2 def __ne__(self, other): """Compare for inequality two points with each-other.""" return not self == other def _add(self, X1, Y1, Z1, T1, X2, Y2, Z2, T2, p, a): """add two points, assume sane parameters.""" # after add-2008-hwcd-2 # from https://hyperelliptic.org/EFD/g1p/auto-twisted-extended.html # NOTE: there are more efficient formulas for Z1 or Z2 == 1 A = X1 * X2 % p B = Y1 * Y2 % p C = Z1 * T2 % p D = T1 * Z2 % p E = D + C F = ((X1 - Y1) * (X2 + Y2) + B - A) % p G = B + a * A H = D - C if not H: return self._double(X1, Y1, Z1, T1, p, a) X3 = E * F % p Y3 = G * H % p T3 = E * H % p Z3 = F * G % p return X3, Y3, Z3, T3 def __add__(self, other): """Add point to another.""" if other == INFINITY: return self if ( not isinstance(other, PointEdwards) or self.__curve != other.__curve ): raise ValueError("The other point is on a different curve.") p, a = self.__curve.p(), self.__curve.a() X1, Y1, Z1, T1 = self.__coords X2, Y2, Z2, T2 = other.__coords X3, Y3, Z3, T3 = self._add(X1, Y1, Z1, T1, X2, Y2, Z2, T2, p, a) if not X3 or not T3: return INFINITY return PointEdwards(self.__curve, X3, Y3, Z3, T3, self.__order) def __radd__(self, other): """Add other to self.""" return self + other def _double(self, X1, Y1, Z1, T1, p, a): """Double the point, assume sane parameters.""" # after "dbl-2008-hwcd" # from https://hyperelliptic.org/EFD/g1p/auto-twisted-extended.html # NOTE: there are more efficient formulas for Z1 == 1 A = X1 * X1 % p B = Y1 * Y1 % p C = 2 * Z1 * Z1 % p D = a * A % p E = ((X1 + Y1) * (X1 + Y1) - A - B) % p G = D + B F = G - C H = D - B X3 = E * F % p Y3 = G * H % p T3 = E * H % p Z3 = F * G % p return X3, Y3, Z3, T3 def double(self): """Return point added to itself.""" X1, Y1, Z1, T1 = self.__coords if not X1 or not T1: return INFINITY p, a = self.__curve.p(), self.__curve.a() X3, Y3, Z3, T3 = self._double(X1, Y1, Z1, T1, p, a) if not X3 or not T3: return INFINITY return PointEdwards(self.__curve, X3, Y3, Z3, T3, self.__order) def __rmul__(self, other): """Multiply point by an integer.""" return self * other def _mul_precompute(self, other): """Multiply point by integer with precomputation table.""" X3, Y3, Z3, T3, p, a = 0, 1, 1, 0, self.__curve.p(), self.__curve.a() _add = self._add for X2, Y2, T2 in self.__precompute: rem = other % 4 if rem == 0 or rem == 2: other //= 2 elif rem == 3: other = (other + 1) // 2 X3, Y3, Z3, T3 = _add(X3, Y3, Z3, T3, -X2, Y2, 1, -T2, p, a) else: assert rem == 1 other = (other - 1) // 2 X3, Y3, Z3, T3 = _add(X3, Y3, Z3, T3, X2, Y2, 1, T2, p, a) if not X3 or not T3: return INFINITY return PointEdwards(self.__curve, X3, Y3, Z3, T3, self.__order) def __mul__(self, other): """Multiply point by an integer.""" X2, Y2, Z2, T2 = self.__coords if not X2 or not T2 or not other: return INFINITY if other == 1: return self if self.__order: # order*2 as a "protection" for Minerva other = other % (self.__order * 2) if self._maybe_precompute(): return self._mul_precompute(other) X3, Y3, Z3, T3 = 0, 1, 1, 0 # INFINITY in extended coordinates p, a = self.__curve.p(), self.__curve.a() _double = self._double _add = self._add for i in reversed(self._naf(other)): X3, Y3, Z3, T3 = 
_double(X3, Y3, Z3, T3, p, a) if i < 0: X3, Y3, Z3, T3 = _add(X3, Y3, Z3, T3, -X2, Y2, Z2, -T2, p, a) elif i > 0: X3, Y3, Z3, T3 = _add(X3, Y3, Z3, T3, X2, Y2, Z2, T2, p, a) if not X3 or not T3: return INFINITY return PointEdwards(self.__curve, X3, Y3, Z3, T3, self.__order) # This one point is the Point At Infinity for all purposes: INFINITY = Point(None, None, None) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1622120382.0 ecdsa-0.18.0/src/ecdsa/errors.py0000664005075200507520000000020214053713676016013 0ustar00hkariohkarioclass MalformedPointError(AssertionError): """Raised in case the encoding of private or public key is malformed.""" pass ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1657371671.0 ecdsa-0.18.0/src/ecdsa/keys.py0000664005075200507520000017714414262276027015472 0ustar00hkariohkario""" Primary classes for performing signing and verification operations. """ import binascii from hashlib import sha1 import os from six import PY2, b from . import ecdsa, eddsa from . import der from . import rfc6979 from . import ellipticcurve from .curves import NIST192p, Curve, Ed25519, Ed448 from .ecdsa import RSZeroError from .util import string_to_number, number_to_string, randrange from .util import sigencode_string, sigdecode_string, bit_length from .util import ( oid_ecPublicKey, encoded_oid_ecPublicKey, oid_ecDH, oid_ecMQV, MalformedSignature, ) from ._compat import normalise_bytes from .errors import MalformedPointError from .ellipticcurve import PointJacobi, CurveEdTw __all__ = [ "BadSignatureError", "BadDigestError", "VerifyingKey", "SigningKey", "MalformedPointError", ] class BadSignatureError(Exception): """ Raised when verification of signature failed. Will be raised irrespective of reason of the failure: * the calculated or provided hash does not match the signature * the signature does not match the curve/public key * the encoding of the signature is malformed * the size of the signature does not match the curve of the VerifyingKey """ pass class BadDigestError(Exception): """Raised in case the selected hash is too large for the curve.""" pass def _truncate_and_convert_digest(digest, curve, allow_truncate): """Truncates and converts digest to an integer.""" if not allow_truncate: if len(digest) > curve.baselen: raise BadDigestError( "this curve ({0}) is too short " "for the length of your digest ({1})".format( curve.name, 8 * len(digest) ) ) else: digest = digest[: curve.baselen] number = string_to_number(digest) if allow_truncate: max_length = bit_length(curve.order) # we don't use bit_length(number) as that truncates leading zeros length = len(digest) * 8 # See NIST FIPS 186-4: # # When the length of the output of the hash function is greater # than N (i.e., the bit length of q), then the leftmost N bits of # the hash function output block shall be used in any calculation # using the hash function output during the generation or # verification of a digital signature. # # as such, we need to shift-out the low-order bits: number >>= max(0, length - max_length) return number class VerifyingKey(object): """ Class for handling keys that can verify signatures (public keys). :ivar `~ecdsa.curves.Curve` ~.curve: The Curve over which all the cryptographic operations will take place :ivar default_hashfunc: the function that will be used for hashing the data. 
Should implement the same API as hashlib.sha1 :vartype default_hashfunc: callable :ivar pubkey: the actual public key :vartype pubkey: ~ecdsa.ecdsa.Public_key """ def __init__(self, _error__please_use_generate=None): """Unsupported, please use one of the classmethods to initialise.""" if not _error__please_use_generate: raise TypeError( "Please use VerifyingKey.generate() to construct me" ) self.curve = None self.default_hashfunc = None self.pubkey = None def __repr__(self): pub_key = self.to_string("compressed") if self.default_hashfunc: hash_name = self.default_hashfunc().name else: hash_name = "None" return "VerifyingKey.from_string({0!r}, {1!r}, {2})".format( pub_key, self.curve, hash_name ) def __eq__(self, other): """Return True if the points are identical, False otherwise.""" if isinstance(other, VerifyingKey): return self.curve == other.curve and self.pubkey == other.pubkey return NotImplemented def __ne__(self, other): """Return False if the points are identical, True otherwise.""" return not self == other @classmethod def from_public_point( cls, point, curve=NIST192p, hashfunc=sha1, validate_point=True ): """ Initialise the object from a Point object. This is a low-level method, generally you will not want to use it. :param point: The point to wrap around, the actual public key :type point: ~ecdsa.ellipticcurve.AbstractPoint :param curve: The curve on which the point needs to reside, defaults to NIST192p :type curve: ~ecdsa.curves.Curve :param hashfunc: The default hash function that will be used for verification, needs to implement the same interface as :py:class:`hashlib.sha1` :type hashfunc: callable :type bool validate_point: whether to check if the point lays on curve should always be used if the public point is not a result of our own calculation :raises MalformedPointError: if the public point does not lay on the curve :return: Initialised VerifyingKey object :rtype: VerifyingKey """ self = cls(_error__please_use_generate=True) if isinstance(curve.curve, CurveEdTw): raise ValueError("Method incompatible with Edwards curves") if not isinstance(point, ellipticcurve.PointJacobi): point = ellipticcurve.PointJacobi.from_affine(point) self.curve = curve self.default_hashfunc = hashfunc try: self.pubkey = ecdsa.Public_key( curve.generator, point, validate_point ) except ecdsa.InvalidPointError: raise MalformedPointError("Point does not lay on the curve") self.pubkey.order = curve.order return self def precompute(self, lazy=False): """ Precompute multiplication tables for faster signature verification. Calling this method will cause the library to precompute the scalar multiplication tables, used in signature verification. While it's an expensive operation (comparable to performing as many signatures as the bit size of the curve, i.e. 256 for NIST256p) it speeds up verification 2 times. You should call this method if you expect to verify hundreds of signatures (or more) using the same VerifyingKey object. Note: You should call this method only once, this method generates a new precomputation table every time it's called. 
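        A hedged usage sketch; ``signatures`` is a placeholder for an
        iterable of ``(signature, message)`` pairs and is not defined
        here::

            from ecdsa import SigningKey, NIST256p

            vk = SigningKey.generate(curve=NIST256p).get_verifying_key()
            vk.precompute()
            for sig, msg in signatures:
                vk.verify(sig, msg)
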
:param bool lazy: whether to calculate the precomputation table now (if set to False) or if it should be delayed to the time of first use (when set to True) """ if isinstance(self.curve.curve, CurveEdTw): pt = self.pubkey.point self.pubkey.point = ellipticcurve.PointEdwards( pt.curve(), pt.x(), pt.y(), 1, pt.x() * pt.y(), self.curve.order, generator=True, ) else: self.pubkey.point = ellipticcurve.PointJacobi.from_affine( self.pubkey.point, True ) # as precomputation in now delayed to the time of first use of the # point and we were asked specifically to precompute now, make # sure the precomputation is performed now to preserve the behaviour if not lazy: self.pubkey.point * 2 @classmethod def from_string( cls, string, curve=NIST192p, hashfunc=sha1, validate_point=True, valid_encodings=None, ): """ Initialise the object from byte encoding of public key. The method does accept and automatically detect the type of point encoding used. It supports the :term:`raw encoding`, :term:`uncompressed`, :term:`compressed`, and :term:`hybrid` encodings. It also works with the native encoding of Ed25519 and Ed448 public keys (technically those are compressed, but encoded differently than in other signature systems). Note, while the method is named "from_string" it's a misnomer from Python 2 days when there were no binary strings. In Python 3 the input needs to be a bytes-like object. :param string: single point encoding of the public key :type string: :term:`bytes-like object` :param curve: the curve on which the public key is expected to lay :type curve: ~ecdsa.curves.Curve :param hashfunc: The default hash function that will be used for verification, needs to implement the same interface as hashlib.sha1. Ignored for EdDSA. :type hashfunc: callable :param validate_point: whether to verify that the point lays on the provided curve or not, defaults to True. Ignored for EdDSA. :type validate_point: bool :param valid_encodings: list of acceptable point encoding formats, supported ones are: :term:`uncompressed`, :term:`compressed`, :term:`hybrid`, and :term:`raw encoding` (specified with ``raw`` name). All formats by default (specified with ``None``). Ignored for EdDSA. :type valid_encodings: :term:`set-like object` :raises MalformedPointError: if the public point does not lay on the curve or the encoding is invalid :return: Initialised VerifyingKey object :rtype: VerifyingKey """ if isinstance(curve.curve, CurveEdTw): self = cls(_error__please_use_generate=True) self.curve = curve self.default_hashfunc = None # ignored for EdDSA try: self.pubkey = eddsa.PublicKey(curve.generator, string) except ValueError: raise MalformedPointError("Malformed point for the curve") return self point = PointJacobi.from_bytes( curve.curve, string, validate_encoding=validate_point, valid_encodings=valid_encodings, ) return cls.from_public_point(point, curve, hashfunc, validate_point) @classmethod def from_pem( cls, string, hashfunc=sha1, valid_encodings=None, valid_curve_encodings=None, ): """ Initialise from public key stored in :term:`PEM` format. The PEM header of the key should be ``BEGIN PUBLIC KEY``. See the :func:`~VerifyingKey.from_der()` method for details of the format supported. Note: only a single PEM object decoding is supported in provided string. :param string: text with PEM-encoded public ECDSA key :type string: str :param valid_encodings: list of allowed point encodings. By default :term:`uncompressed`, :term:`compressed`, and :term:`hybrid`. 
To read malformed files, include :term:`raw encoding` with ``raw`` in the list. :type valid_encodings: :term:`set-like object` :param valid_curve_encodings: list of allowed encoding formats for curve parameters. By default (``None``) all are supported: ``named_curve`` and ``explicit``. :type valid_curve_encodings: :term:`set-like object` :return: Initialised VerifyingKey object :rtype: VerifyingKey """ return cls.from_der( der.unpem(string), hashfunc=hashfunc, valid_encodings=valid_encodings, valid_curve_encodings=valid_curve_encodings, ) @classmethod def from_der( cls, string, hashfunc=sha1, valid_encodings=None, valid_curve_encodings=None, ): """ Initialise the key stored in :term:`DER` format. The expected format of the key is the SubjectPublicKeyInfo structure from RFC5912 (for RSA keys, it's known as the PKCS#1 format):: SubjectPublicKeyInfo {PUBLIC-KEY: IOSet} ::= SEQUENCE { algorithm AlgorithmIdentifier {PUBLIC-KEY, {IOSet}}, subjectPublicKey BIT STRING } Note: only public EC keys are supported by this method. The SubjectPublicKeyInfo.algorithm.algorithm field must specify id-ecPublicKey (see RFC3279). Only the named curve encoding is supported, thus the SubjectPublicKeyInfo.algorithm.parameters field needs to be an object identifier. A sequence in that field indicates an explicit parameter curve encoding, this format is not supported. A NULL object in that field indicates an "implicitlyCA" encoding, where the curve parameters come from CA certificate, those, again, are not supported. :param string: binary string with the DER encoding of public ECDSA key :type string: bytes-like object :param valid_encodings: list of allowed point encodings. By default :term:`uncompressed`, :term:`compressed`, and :term:`hybrid`. To read malformed files, include :term:`raw encoding` with ``raw`` in the list. :type valid_encodings: :term:`set-like object` :param valid_curve_encodings: list of allowed encoding formats for curve parameters. By default (``None``) all are supported: ``named_curve`` and ``explicit``. 
:type valid_curve_encodings: :term:`set-like object` :return: Initialised VerifyingKey object :rtype: VerifyingKey """ if valid_encodings is None: valid_encodings = set(["uncompressed", "compressed", "hybrid"]) string = normalise_bytes(string) # [[oid_ecPublicKey,oid_curve], point_str_bitstring] s1, empty = der.remove_sequence(string) if empty != b"": raise der.UnexpectedDER( "trailing junk after DER pubkey: %s" % binascii.hexlify(empty) ) s2, point_str_bitstring = der.remove_sequence(s1) # s2 = oid_ecPublicKey,oid_curve oid_pk, rest = der.remove_object(s2) if oid_pk in (Ed25519.oid, Ed448.oid): if oid_pk == Ed25519.oid: curve = Ed25519 else: assert oid_pk == Ed448.oid curve = Ed448 point_str, empty = der.remove_bitstring(point_str_bitstring, 0) if empty: raise der.UnexpectedDER("trailing junk after public key") return cls.from_string(point_str, curve, None) if not oid_pk == oid_ecPublicKey: raise der.UnexpectedDER( "Unexpected object identifier in DER " "encoding: {0!r}".format(oid_pk) ) curve = Curve.from_der(rest, valid_curve_encodings) point_str, empty = der.remove_bitstring(point_str_bitstring, 0) if empty != b"": raise der.UnexpectedDER( "trailing junk after pubkey pointstring: %s" % binascii.hexlify(empty) ) # raw encoding of point is invalid in DER files if len(point_str) == curve.verifying_key_length: raise der.UnexpectedDER("Malformed encoding of public point") return cls.from_string( point_str, curve, hashfunc=hashfunc, valid_encodings=valid_encodings, ) @classmethod def from_public_key_recovery( cls, signature, data, curve, hashfunc=sha1, sigdecode=sigdecode_string, allow_truncate=True, ): """ Return keys that can be used as verifiers of the provided signature. Tries to recover the public key that can be used to verify the signature, usually returns two keys like that. :param signature: the byte string with the encoded signature :type signature: bytes-like object :param data: the data to be hashed for signature verification :type data: bytes-like object :param curve: the curve over which the signature was performed :type curve: ~ecdsa.curves.Curve :param hashfunc: The default hash function that will be used for verification, needs to implement the same interface as hashlib.sha1 :type hashfunc: callable :param sigdecode: Callable to define the way the signature needs to be decoded to an object, needs to handle `signature` as the first parameter, the curve order (an int) as the second and return a tuple with two integers, "r" as the first one and "s" as the second one. See :func:`ecdsa.util.sigdecode_string` and :func:`ecdsa.util.sigdecode_der` for examples. :param bool allow_truncate: if True, the provided hashfunc can generate values larger than the bit size of the order of the curve, the extra bits (at the end of the digest) will be truncated. :type sigdecode: callable :return: Initialised VerifyingKey objects :rtype: list of VerifyingKey """ if isinstance(curve.curve, CurveEdTw): raise ValueError("Method unsupported for Edwards curves") data = normalise_bytes(data) digest = hashfunc(data).digest() return cls.from_public_key_recovery_with_digest( signature, digest, curve, hashfunc=hashfunc, sigdecode=sigdecode, allow_truncate=allow_truncate, ) @classmethod def from_public_key_recovery_with_digest( cls, signature, digest, curve, hashfunc=sha1, sigdecode=sigdecode_string, allow_truncate=False, ): """ Return keys that can be used as verifiers of the provided signature. Tries to recover the public key that can be used to verify the signature, usually returns two keys like that. 
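        A hedged sketch; ``signature64`` (a 64-byte raw signature) and
        ``digest32`` (a 32-byte hash value) are placeholders for
        caller-provided data, not values defined by this library::

            from hashlib import sha256
            from ecdsa import VerifyingKey, NIST256p

            candidates = VerifyingKey.from_public_key_recovery_with_digest(
                signature64, digest32, NIST256p, hashfunc=sha256
            )
            for vk in candidates:
                # every recovered key verifies this particular signature;
                # external context is needed to pick the actual signer's key
                vk.verify_digest(signature64, digest32)
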
:param signature: the byte string with the encoded signature :type signature: bytes-like object :param digest: the hash value of the message signed by the signature :type digest: bytes-like object :param curve: the curve over which the signature was performed :type curve: ~ecdsa.curves.Curve :param hashfunc: The default hash function that will be used for verification, needs to implement the same interface as hashlib.sha1 :type hashfunc: callable :param sigdecode: Callable to define the way the signature needs to be decoded to an object, needs to handle `signature` as the first parameter, the curve order (an int) as the second and return a tuple with two integers, "r" as the first one and "s" as the second one. See :func:`ecdsa.util.sigdecode_string` and :func:`ecdsa.util.sigdecode_der` for examples. :type sigdecode: callable :param bool allow_truncate: if True, the provided hashfunc can generate values larger than the bit size of the order of the curve (and the length of provided `digest`), the extra bits (at the end of the digest) will be truncated. :return: Initialised VerifyingKey object :rtype: VerifyingKey """ if isinstance(curve.curve, CurveEdTw): raise ValueError("Method unsupported for Edwards curves") generator = curve.generator r, s = sigdecode(signature, generator.order()) sig = ecdsa.Signature(r, s) digest = normalise_bytes(digest) digest_as_number = _truncate_and_convert_digest( digest, curve, allow_truncate ) pks = sig.recover_public_keys(digest_as_number, generator) # Transforms the ecdsa.Public_key object into a VerifyingKey verifying_keys = [ cls.from_public_point(pk.point, curve, hashfunc) for pk in pks ] return verifying_keys def to_string(self, encoding="raw"): """ Convert the public key to a byte string. The method by default uses the :term:`raw encoding` (specified by `encoding="raw"`. It can also output keys in :term:`uncompressed`, :term:`compressed` and :term:`hybrid` formats. Remember that the curve identification is not part of the encoding so to decode the point using :func:`~VerifyingKey.from_string`, curve needs to be specified. Note: while the method is called "to_string", it's a misnomer from Python 2 days when character strings and byte strings shared type. On Python 3 the returned type will be `bytes`. :return: :term:`raw encoding` of the public key (public point) on the curve :rtype: bytes """ assert encoding in ("raw", "uncompressed", "compressed", "hybrid") return self.pubkey.point.to_bytes(encoding) def to_pem( self, point_encoding="uncompressed", curve_parameters_encoding=None ): """ Convert the public key to the :term:`PEM` format. The PEM header of the key will be ``BEGIN PUBLIC KEY``. The format of the key is described in the :func:`~VerifyingKey.from_der()` method. This method supports only "named curve" encoding of keys. :param str point_encoding: specification of the encoding format of public keys. "uncompressed" is most portable, "compressed" is smallest. "hybrid" is uncommon and unsupported by most implementations, it is as big as "uncompressed". :param str curve_parameters_encoding: the encoding for curve parameters to use, by default tries to use ``named_curve`` encoding, if that is not possible, falls back to ``explicit`` encoding. :return: portable encoding of the public key :rtype: bytes .. warning:: The PEM is encoded to US-ASCII, it needs to be re-encoded if the system is incompatible (e.g. 
uses UTF-16) """ return der.topem( self.to_der(point_encoding, curve_parameters_encoding), "PUBLIC KEY", ) def to_der( self, point_encoding="uncompressed", curve_parameters_encoding=None ): """ Convert the public key to the :term:`DER` format. The format of the key is described in the :func:`~VerifyingKey.from_der()` method. This method supports only "named curve" encoding of keys. :param str point_encoding: specification of the encoding format of public keys. "uncompressed" is most portable, "compressed" is smallest. "hybrid" is uncommon and unsupported by most implementations, it is as big as "uncompressed". :param str curve_parameters_encoding: the encoding for curve parameters to use, by default tries to use ``named_curve`` encoding, if that is not possible, falls back to ``explicit`` encoding. :return: DER encoding of the public key :rtype: bytes """ if point_encoding == "raw": raise ValueError("raw point_encoding not allowed in DER") point_str = self.to_string(point_encoding) if isinstance(self.curve.curve, CurveEdTw): return der.encode_sequence( der.encode_sequence(der.encode_oid(*self.curve.oid)), der.encode_bitstring(bytes(point_str), 0), ) return der.encode_sequence( der.encode_sequence( encoded_oid_ecPublicKey, self.curve.to_der(curve_parameters_encoding, point_encoding), ), # 0 is the number of unused bits in the # bit string der.encode_bitstring(point_str, 0), ) def verify( self, signature, data, hashfunc=None, sigdecode=sigdecode_string, allow_truncate=True, ): """ Verify a signature made over provided data. Will hash `data` to verify the signature. By default expects signature in :term:`raw encoding`. Can also be used to verify signatures in ASN.1 DER encoding by using :func:`ecdsa.util.sigdecode_der` as the `sigdecode` parameter. :param signature: encoding of the signature :type signature: sigdecode method dependent :param data: data signed by the `signature`, will be hashed using `hashfunc`, if specified, or default hash function :type data: :term:`bytes-like object` :param hashfunc: The default hash function that will be used for verification, needs to implement the same interface as hashlib.sha1 :type hashfunc: callable :param sigdecode: Callable to define the way the signature needs to be decoded to an object, needs to handle `signature` as the first parameter, the curve order (an int) as the second and return a tuple with two integers, "r" as the first one and "s" as the second one. See :func:`ecdsa.util.sigdecode_string` and :func:`ecdsa.util.sigdecode_der` for examples. :type sigdecode: callable :param bool allow_truncate: if True, the provided digest can have bigger bit-size than the order of the curve, the extra bits (at the end of the digest) will be truncated. Use it when verifying SHA-384 output using NIST256p or in similar situations. Defaults to True. 
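        A hedged usage sketch; ``public_pem``, ``msg``, ``sig`` and
        ``der_sig`` are placeholders for caller-provided data::

            from hashlib import sha256
            from ecdsa import VerifyingKey
            from ecdsa.util import sigdecode_der

            vk = VerifyingKey.from_pem(public_pem)
            # raw (r||s) signature over msg, hashed with the default hash
            vk.verify(sig, msg)
            # DER-encoded signature over msg, hashed with SHA-256
            vk.verify(der_sig, msg, hashfunc=sha256, sigdecode=sigdecode_der)
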
:raises BadSignatureError: if the signature is invalid or malformed :return: True if the verification was successful :rtype: bool """ # signature doesn't have to be a bytes-like-object so don't normalise # it, the decoders will do that data = normalise_bytes(data) if isinstance(self.curve.curve, CurveEdTw): signature = normalise_bytes(signature) try: return self.pubkey.verify(data, signature) except (ValueError, MalformedPointError) as e: raise BadSignatureError("Signature verification failed", e) hashfunc = hashfunc or self.default_hashfunc digest = hashfunc(data).digest() return self.verify_digest(signature, digest, sigdecode, allow_truncate) def verify_digest( self, signature, digest, sigdecode=sigdecode_string, allow_truncate=False, ): """ Verify a signature made over provided hash value. By default expects signature in :term:`raw encoding`. Can also be used to verify signatures in ASN.1 DER encoding by using :func:`ecdsa.util.sigdecode_der` as the `sigdecode` parameter. :param signature: encoding of the signature :type signature: sigdecode method dependent :param digest: raw hash value that the signature authenticates. :type digest: :term:`bytes-like object` :param sigdecode: Callable to define the way the signature needs to be decoded to an object, needs to handle `signature` as the first parameter, the curve order (an int) as the second and return a tuple with two integers, "r" as the first one and "s" as the second one. See :func:`ecdsa.util.sigdecode_string` and :func:`ecdsa.util.sigdecode_der` for examples. :type sigdecode: callable :param bool allow_truncate: if True, the provided digest can have bigger bit-size than the order of the curve, the extra bits (at the end of the digest) will be truncated. Use it when verifying SHA-384 output using NIST256p or in similar situations. :raises BadSignatureError: if the signature is invalid or malformed :raises BadDigestError: if the provided digest is too big for the curve associated with this VerifyingKey and allow_truncate was not set :return: True if the verification was successful :rtype: bool """ # signature doesn't have to be a bytes-like-object so don't normalise # it, the decoders will do that digest = normalise_bytes(digest) number = _truncate_and_convert_digest( digest, self.curve, allow_truncate, ) try: r, s = sigdecode(signature, self.pubkey.order) except (der.UnexpectedDER, MalformedSignature) as e: raise BadSignatureError("Malformed formatting of signature", e) sig = ecdsa.Signature(r, s) if self.pubkey.verifies(number, sig): return True raise BadSignatureError("Signature verification failed") class SigningKey(object): """ Class for handling keys that can create signatures (private keys). :ivar `~ecdsa.curves.Curve` curve: The Curve over which all the cryptographic operations will take place :ivar default_hashfunc: the function that will be used for hashing the data. 
Should implement the same API as :py:class:`hashlib.sha1` :ivar int baselen: the length of a :term:`raw encoding` of private key :ivar `~ecdsa.keys.VerifyingKey` verifying_key: the public key associated with this private key :ivar `~ecdsa.ecdsa.Private_key` privkey: the actual private key """ def __init__(self, _error__please_use_generate=None): """Unsupported, please use one of the classmethods to initialise.""" if not _error__please_use_generate: raise TypeError("Please use SigningKey.generate() to construct me") self.curve = None self.default_hashfunc = None self.baselen = None self.verifying_key = None self.privkey = None def __eq__(self, other): """Return True if the points are identical, False otherwise.""" if isinstance(other, SigningKey): return ( self.curve == other.curve and self.verifying_key == other.verifying_key and self.privkey == other.privkey ) return NotImplemented def __ne__(self, other): """Return False if the points are identical, True otherwise.""" return not self == other @classmethod def _twisted_edwards_keygen(cls, curve, entropy): """Generate a private key on a Twisted Edwards curve.""" if not entropy: entropy = os.urandom random = entropy(curve.baselen) private_key = eddsa.PrivateKey(curve.generator, random) public_key = private_key.public_key() verifying_key = VerifyingKey.from_string( public_key.public_key(), curve ) self = cls(_error__please_use_generate=True) self.curve = curve self.default_hashfunc = None self.baselen = curve.baselen self.privkey = private_key self.verifying_key = verifying_key return self @classmethod def _weierstrass_keygen(cls, curve, entropy, hashfunc): """Generate a private key on a Weierstrass curve.""" secexp = randrange(curve.order, entropy) return cls.from_secret_exponent(secexp, curve, hashfunc) @classmethod def generate(cls, curve=NIST192p, entropy=None, hashfunc=sha1): """ Generate a random private key. :param curve: The curve on which the point needs to reside, defaults to NIST192p :type curve: ~ecdsa.curves.Curve :param entropy: Source of randomness for generating the private keys, should provide cryptographically secure random numbers if the keys need to be secure. Uses os.urandom() by default. :type entropy: callable :param hashfunc: The default hash function that will be used for signing, needs to implement the same interface as hashlib.sha1 :type hashfunc: callable :return: Initialised SigningKey object :rtype: SigningKey """ if isinstance(curve.curve, CurveEdTw): return cls._twisted_edwards_keygen(curve, entropy) return cls._weierstrass_keygen(curve, entropy, hashfunc) @classmethod def from_secret_exponent(cls, secexp, curve=NIST192p, hashfunc=sha1): """ Create a private key from a random integer. Note: it's a low level method, it's recommended to use the :func:`~SigningKey.generate` method to create private keys. :param int secexp: secret multiplier (the actual private key in ECDSA). Needs to be an integer between 1 and the curve order. 
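        A hedged sketch; the secret integer below is a toy value and must
        not be used for real keys::

            from ecdsa import SigningKey, NIST256p

            sk = SigningKey.from_secret_exponent(0x1234, curve=NIST256p)
            assert sk.privkey.secret_multiplier == 0x1234
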
:param curve: The curve on which the point needs to reside :type curve: ~ecdsa.curves.Curve :param hashfunc: The default hash function that will be used for signing, needs to implement the same interface as hashlib.sha1 :type hashfunc: callable :raises MalformedPointError: when the provided secexp is too large or too small for the curve selected :raises RuntimeError: if the generation of public key from private key failed :return: Initialised SigningKey object :rtype: SigningKey """ if isinstance(curve.curve, CurveEdTw): raise ValueError( "Edwards keys don't support setting the secret scalar " "(exponent) directly" ) self = cls(_error__please_use_generate=True) self.curve = curve self.default_hashfunc = hashfunc self.baselen = curve.baselen n = curve.order if not 1 <= secexp < n: raise MalformedPointError( "Invalid value for secexp, expected integer " "between 1 and {0}".format(n) ) pubkey_point = curve.generator * secexp if hasattr(pubkey_point, "scale"): pubkey_point = pubkey_point.scale() self.verifying_key = VerifyingKey.from_public_point( pubkey_point, curve, hashfunc, False ) pubkey = self.verifying_key.pubkey self.privkey = ecdsa.Private_key(pubkey, secexp) self.privkey.order = n return self @classmethod def from_string(cls, string, curve=NIST192p, hashfunc=sha1): """ Decode the private key from :term:`raw encoding`. Note: the name of this method is a misnomer coming from days of Python 2, when binary strings and character strings shared a type. In Python 3, the expected type is `bytes`. :param string: the raw encoding of the private key :type string: :term:`bytes-like object` :param curve: The curve on which the point needs to reside :type curve: ~ecdsa.curves.Curve :param hashfunc: The default hash function that will be used for signing, needs to implement the same interface as hashlib.sha1 :type hashfunc: callable :raises MalformedPointError: if the length of encoding doesn't match the provided curve or the encoded values is too large :raises RuntimeError: if the generation of public key from private key failed :return: Initialised SigningKey object :rtype: SigningKey """ string = normalise_bytes(string) if len(string) != curve.baselen: raise MalformedPointError( "Invalid length of private key, received {0}, " "expected {1}".format(len(string), curve.baselen) ) if isinstance(curve.curve, CurveEdTw): self = cls(_error__please_use_generate=True) self.curve = curve self.default_hashfunc = None # Ignored for EdDSA self.baselen = curve.baselen self.privkey = eddsa.PrivateKey(curve.generator, string) self.verifying_key = VerifyingKey.from_string( self.privkey.public_key().public_key(), curve ) return self secexp = string_to_number(string) return cls.from_secret_exponent(secexp, curve, hashfunc) @classmethod def from_pem(cls, string, hashfunc=sha1, valid_curve_encodings=None): """ Initialise from key stored in :term:`PEM` format. The PEM formats supported are the un-encrypted RFC5915 (the ssleay format) supported by OpenSSL, and the more common un-encrypted RFC5958 (the PKCS #8 format). The legacy format files have the header with the string ``BEGIN EC PRIVATE KEY``. PKCS#8 files have the header ``BEGIN PRIVATE KEY``. Encrypted files (ones that include the string ``Proc-Type: 4,ENCRYPTED`` right after the PEM header) are not supported. See :func:`~SigningKey.from_der` for ASN.1 syntax of the objects in this files. :param string: text with PEM-encoded private ECDSA key :type string: str :param valid_curve_encodings: list of allowed encoding formats for curve parameters. 
By default (``None``) all are supported: ``named_curve`` and ``explicit``. :type valid_curve_encodings: :term:`set-like object` :raises MalformedPointError: if the length of encoding doesn't match the provided curve or the encoded values is too large :raises RuntimeError: if the generation of public key from private key failed :raises UnexpectedDER: if the encoding of the PEM file is incorrect :return: Initialised SigningKey object :rtype: SigningKey """ if not PY2 and isinstance(string, str): # pragma: no branch string = string.encode() # The privkey pem may have multiple sections, commonly it also has # "EC PARAMETERS", we need just "EC PRIVATE KEY". PKCS#8 should not # have the "EC PARAMETERS" section; it's just "PRIVATE KEY". private_key_index = string.find(b"-----BEGIN EC PRIVATE KEY-----") if private_key_index == -1: private_key_index = string.index(b"-----BEGIN PRIVATE KEY-----") return cls.from_der( der.unpem(string[private_key_index:]), hashfunc, valid_curve_encodings, ) @classmethod def from_der(cls, string, hashfunc=sha1, valid_curve_encodings=None): """ Initialise from key stored in :term:`DER` format. The DER formats supported are the un-encrypted RFC5915 (the ssleay format) supported by OpenSSL, and the more common un-encrypted RFC5958 (the PKCS #8 format). Both formats contain an ASN.1 object following the syntax specified in RFC5915:: ECPrivateKey ::= SEQUENCE { version INTEGER { ecPrivkeyVer1(1) }} (ecPrivkeyVer1), privateKey OCTET STRING, parameters [0] ECParameters {{ NamedCurve }} OPTIONAL, publicKey [1] BIT STRING OPTIONAL } `publicKey` field is ignored completely (errors, if any, in it will be undetected). Two formats are supported for the `parameters` field: the named curve and the explicit encoding of curve parameters. In the legacy ssleay format, this implementation requires the optional `parameters` field to get the curve name. In PKCS #8 format, the curve is part of the PrivateKeyAlgorithmIdentifier. The PKCS #8 format includes an ECPrivateKey object as the `privateKey` field within a larger structure:: OneAsymmetricKey ::= SEQUENCE { version Version, privateKeyAlgorithm PrivateKeyAlgorithmIdentifier, privateKey PrivateKey, attributes [0] Attributes OPTIONAL, ..., [[2: publicKey [1] PublicKey OPTIONAL ]], ... } The `attributes` and `publicKey` fields are completely ignored; errors in them will not be detected. :param string: binary string with DER-encoded private ECDSA key :type string: :term:`bytes-like object` :param valid_curve_encodings: list of allowed encoding formats for curve parameters. By default (``None``) all are supported: ``named_curve`` and ``explicit``. Ignored for EdDSA. :type valid_curve_encodings: :term:`set-like object` :raises MalformedPointError: if the length of encoding doesn't match the provided curve or the encoded values is too large :raises RuntimeError: if the generation of public key from private key failed :raises UnexpectedDER: if the encoding of the DER file is incorrect :return: Initialised SigningKey object :rtype: SigningKey """ s = normalise_bytes(string) curve = None s, empty = der.remove_sequence(s) if empty != b(""): raise der.UnexpectedDER( "trailing junk after DER privkey: %s" % binascii.hexlify(empty) ) version, s = der.remove_integer(s) # At this point, PKCS #8 has a sequence containing the algorithm # identifier and the curve identifier. The ssleay format instead has # an octet string containing the key data, so this is how we can # distinguish the two formats. 
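        # A rough sketch of the two layouts at this point of the parse
        # (informal; field names follow the RFCs cited in the docstring):
        #   PKCS #8 / RFC 5958:  SEQUENCE(AlgorithmIdentifier),
        #       OCTET STRING(ECPrivateKey), optional attributes...
        #   ssleay  / RFC 5915:  OCTET STRING(privateKey),
        #       [0] ECParameters, [1] BIT STRING publicKey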
if der.is_sequence(s): if version not in (0, 1): raise der.UnexpectedDER( "expected version '0' or '1' at start of privkey, got %d" % version ) sequence, s = der.remove_sequence(s) algorithm_oid, algorithm_identifier = der.remove_object(sequence) if algorithm_oid in (Ed25519.oid, Ed448.oid): if algorithm_identifier: raise der.UnexpectedDER( "Non NULL parameters for a EdDSA key" ) key_str_der, s = der.remove_octet_string(s) # As RFC5958 describe, there are may be optional Attributes # and Publickey. Don't raise error if something after # Privatekey # TODO parse attributes or validate publickey # if s: # raise der.UnexpectedDER( # "trailing junk inside the privateKey" # ) key_str, s = der.remove_octet_string(key_str_der) if s: raise der.UnexpectedDER( "trailing junk after the encoded private key" ) if algorithm_oid == Ed25519.oid: curve = Ed25519 else: assert algorithm_oid == Ed448.oid curve = Ed448 return cls.from_string(key_str, curve, None) if algorithm_oid not in (oid_ecPublicKey, oid_ecDH, oid_ecMQV): raise der.UnexpectedDER( "unexpected algorithm identifier '%s'" % (algorithm_oid,) ) curve = Curve.from_der(algorithm_identifier, valid_curve_encodings) if empty != b"": raise der.UnexpectedDER( "unexpected data after algorithm identifier: %s" % binascii.hexlify(empty) ) # Up next is an octet string containing an ECPrivateKey. Ignore # the optional "attributes" and "publicKey" fields that come after. s, _ = der.remove_octet_string(s) # Unpack the ECPrivateKey to get to the key data octet string, # and rejoin the ssleay parsing path. s, empty = der.remove_sequence(s) if empty != b(""): raise der.UnexpectedDER( "trailing junk after DER privkey: %s" % binascii.hexlify(empty) ) version, s = der.remove_integer(s) # The version of the ECPrivateKey must be 1. if version != 1: raise der.UnexpectedDER( "expected version '1' at start of DER privkey, got %d" % version ) privkey_str, s = der.remove_octet_string(s) if not curve: tag, curve_oid_str, s = der.remove_constructed(s) if tag != 0: raise der.UnexpectedDER( "expected tag 0 in DER privkey, got %d" % tag ) curve = Curve.from_der(curve_oid_str, valid_curve_encodings) # we don't actually care about the following fields # # tag, pubkey_bitstring, s = der.remove_constructed(s) # if tag != 1: # raise der.UnexpectedDER("expected tag 1 in DER privkey, got %d" # % tag) # pubkey_str = der.remove_bitstring(pubkey_bitstring, 0) # if empty != "": # raise der.UnexpectedDER("trailing junk after DER privkey " # "pubkeystr: %s" # % binascii.hexlify(empty)) # our from_string method likes fixed-length privkey strings if len(privkey_str) < curve.baselen: privkey_str = ( b("\x00") * (curve.baselen - len(privkey_str)) + privkey_str ) return cls.from_string(privkey_str, curve, hashfunc) def to_string(self): """ Convert the private key to :term:`raw encoding`. Note: while the method is named "to_string", its name comes from Python 2 days, when binary and character strings used the same type. The type used in Python 3 is `bytes`. :return: raw encoding of private key :rtype: bytes """ if isinstance(self.curve.curve, CurveEdTw): return bytes(self.privkey.private_key) secexp = self.privkey.secret_multiplier s = number_to_string(secexp, self.privkey.order) return s def to_pem( self, point_encoding="uncompressed", format="ssleay", curve_parameters_encoding=None, ): """ Convert the private key to the :term:`PEM` format. See :func:`~SigningKey.from_pem` method for format description. Only the named curve format is supported. The public key will be included in generated string. 
The PEM header will specify ``BEGIN EC PRIVATE KEY`` or ``BEGIN PRIVATE KEY``, depending on the desired format. :param str point_encoding: format to use for encoding public point :param str format: either ``ssleay`` (default) or ``pkcs8`` :param str curve_parameters_encoding: format of encoded curve parameters, default depends on the curve, if the curve has an associated OID, ``named_curve`` format will be used, if no OID is associated with the curve, the fallback of ``explicit`` parameters will be used. :return: PEM encoded private key :rtype: bytes .. warning:: The PEM is encoded to US-ASCII, it needs to be re-encoded if the system is incompatible (e.g. uses UTF-16) """ # TODO: "BEGIN ECPARAMETERS" assert format in ("ssleay", "pkcs8") header = "EC PRIVATE KEY" if format == "ssleay" else "PRIVATE KEY" return der.topem( self.to_der(point_encoding, format, curve_parameters_encoding), header, ) def _encode_eddsa(self): """Create a PKCS#8 encoding of EdDSA keys.""" ec_private_key = der.encode_octet_string(self.to_string()) return der.encode_sequence( der.encode_integer(0), der.encode_sequence(der.encode_oid(*self.curve.oid)), der.encode_octet_string(ec_private_key), ) def to_der( self, point_encoding="uncompressed", format="ssleay", curve_parameters_encoding=None, ): """ Convert the private key to the :term:`DER` format. See :func:`~SigningKey.from_der` method for format specification. Only the named curve format is supported. The public key will be included in the generated string. :param str point_encoding: format to use for encoding public point Ignored for EdDSA :param str format: either ``ssleay`` (default) or ``pkcs8``. EdDSA keys require ``pkcs8``. :param str curve_parameters_encoding: format of encoded curve parameters, default depends on the curve, if the curve has an associated OID, ``named_curve`` format will be used, if no OID is associated with the curve, the fallback of ``explicit`` parameters will be used. Ignored for EdDSA. :return: DER encoded private key :rtype: bytes """ # SEQ([int(1), octetstring(privkey),cont[0], oid(secp224r1), # cont[1],bitstring]) if point_encoding == "raw": raise ValueError("raw encoding not allowed in DER") assert format in ("ssleay", "pkcs8") if isinstance(self.curve.curve, CurveEdTw): if format != "pkcs8": raise ValueError("Only PKCS#8 format supported for EdDSA keys") return self._encode_eddsa() encoded_vk = self.get_verifying_key().to_string(point_encoding) priv_key_elems = [ der.encode_integer(1), der.encode_octet_string(self.to_string()), ] if format == "ssleay": priv_key_elems.append( der.encode_constructed( 0, self.curve.to_der(curve_parameters_encoding) ) ) # the 0 in encode_bitstring specifies the number of unused bits # in the `encoded_vk` string priv_key_elems.append( der.encode_constructed(1, der.encode_bitstring(encoded_vk, 0)) ) ec_private_key = der.encode_sequence(*priv_key_elems) if format == "ssleay": return ec_private_key else: return der.encode_sequence( # version = 1 means the public key is not present in the # top-level structure. der.encode_integer(1), der.encode_sequence( der.encode_oid(*oid_ecPublicKey), self.curve.to_der(curve_parameters_encoding), ), der.encode_octet_string(ec_private_key), ) def get_verifying_key(self): """ Return the VerifyingKey associated with this private key. Equivalent to reading the `verifying_key` field of an instance. 
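        A hedged sketch::

            from ecdsa import SigningKey, NIST256p

            sk = SigningKey.generate(curve=NIST256p)
            vk = sk.get_verifying_key()
            assert vk.verify(sk.sign(b"message"), b"message")
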
:return: a public key that can be used to verify the signatures made with this SigningKey :rtype: VerifyingKey """ return self.verifying_key def sign_deterministic( self, data, hashfunc=None, sigencode=sigencode_string, extra_entropy=b"", ): """ Create signature over data. For Weierstrass curves it uses the deterministic RFC6979 algorithm. For Edwards curves it uses the standard EdDSA algorithm. For ECDSA the data will be hashed using the `hashfunc` function before signing. For EdDSA the data will be hashed with the hash associated with the curve (SHA-512 for Ed25519 and SHAKE-256 for Ed448). This is the recommended method for performing signatures when hashing of data is necessary. :param data: data to be hashed and computed signature over :type data: :term:`bytes-like object` :param hashfunc: hash function to use for computing the signature, if unspecified, the default hash function selected during object initialisation will be used (see `VerifyingKey.default_hashfunc`). The object needs to implement the same interface as hashlib.sha1. Ignored with EdDSA. :type hashfunc: callable :param sigencode: function used to encode the signature. The function needs to accept three parameters: the two integers that are the signature and the order of the curve over which the signature was computed. It needs to return an encoded signature. See `ecdsa.util.sigencode_string` and `ecdsa.util.sigencode_der` as examples of such functions. Ignored with EdDSA. :type sigencode: callable :param extra_entropy: additional data that will be fed into the random number generator used in the RFC6979 process. Entirely optional. Ignored with EdDSA. :type extra_entropy: :term:`bytes-like object` :return: encoded signature over `data` :rtype: bytes or sigencode function dependent type """ hashfunc = hashfunc or self.default_hashfunc data = normalise_bytes(data) if isinstance(self.curve.curve, CurveEdTw): return self.privkey.sign(data) extra_entropy = normalise_bytes(extra_entropy) digest = hashfunc(data).digest() return self.sign_digest_deterministic( digest, hashfunc=hashfunc, sigencode=sigencode, extra_entropy=extra_entropy, allow_truncate=True, ) def sign_digest_deterministic( self, digest, hashfunc=None, sigencode=sigencode_string, extra_entropy=b"", allow_truncate=False, ): """ Create signature for digest using the deterministic RFC6979 algorithm. `digest` should be the output of cryptographically secure hash function like SHA256 or SHA-3-256. This is the recommended method for performing signatures when no hashing of data is necessary. :param digest: hash of data that will be signed :type digest: :term:`bytes-like object` :param hashfunc: hash function to use for computing the random "k" value from RFC6979 process, if unspecified, the default hash function selected during object initialisation will be used (see :attr:`.VerifyingKey.default_hashfunc`). The object needs to implement the same interface as :func:`~hashlib.sha1` from :py:mod:`hashlib`. :type hashfunc: callable :param sigencode: function used to encode the signature. The function needs to accept three parameters: the two integers that are the signature and the order of the curve over which the signature was computed. It needs to return an encoded signature. See :func:`~ecdsa.util.sigencode_string` and :func:`~ecdsa.util.sigencode_der` as examples of such functions. :type sigencode: callable :param extra_entropy: additional data that will be fed into the random number generator used in the RFC6979 process. Entirely optional. 
:type extra_entropy: :term:`bytes-like object` :param bool allow_truncate: if True, the provided digest can have bigger bit-size than the order of the curve, the extra bits (at the end of the digest) will be truncated. Use it when signing SHA-384 output using NIST256p or in similar situations. :return: encoded signature for the `digest` hash :rtype: bytes or sigencode function dependent type """ if isinstance(self.curve.curve, CurveEdTw): raise ValueError("Method unsupported for Edwards curves") secexp = self.privkey.secret_multiplier hashfunc = hashfunc or self.default_hashfunc digest = normalise_bytes(digest) extra_entropy = normalise_bytes(extra_entropy) def simple_r_s(r, s, order): return r, s, order retry_gen = 0 while True: k = rfc6979.generate_k( self.curve.generator.order(), secexp, hashfunc, digest, retry_gen=retry_gen, extra_entropy=extra_entropy, ) try: r, s, order = self.sign_digest( digest, sigencode=simple_r_s, k=k, allow_truncate=allow_truncate, ) break except RSZeroError: retry_gen += 1 return sigencode(r, s, order) def sign( self, data, entropy=None, hashfunc=None, sigencode=sigencode_string, k=None, allow_truncate=True, ): """ Create signature over data. Uses the probabilistic ECDSA algorithm for Weierstrass curves (NIST256p, etc.) and the deterministic EdDSA algorithm for the Edwards curves (Ed25519, Ed448). This method uses the standard ECDSA algorithm that requires a cryptographically secure random number generator. It's recommended to use the :func:`~SigningKey.sign_deterministic` method instead of this one. :param data: data that will be hashed for signing :type data: :term:`bytes-like object` :param callable entropy: randomness source, :func:`os.urandom` by default. Ignored with EdDSA. :param hashfunc: hash function to use for hashing the provided ``data``. If unspecified the default hash function selected during object initialisation will be used (see :attr:`.VerifyingKey.default_hashfunc`). Should behave like :func:`~hashlib.sha1` from :py:mod:`hashlib`. The output length of the hash (in bytes) must not be longer than the length of the curve order (rounded up to the nearest byte), so using SHA256 with NIST256p is ok, but SHA256 with NIST192p is not. (In the 2**-96ish unlikely event of a hash output larger than the curve order, the hash will effectively be wrapped mod n). If you want to explicitly allow use of large hashes with small curves set the ``allow_truncate`` to ``True``. Use ``hashfunc=hashlib.sha1`` to match openssl's ``-ecdsa-with-SHA1`` mode, or ``hashfunc=hashlib.sha256`` for openssl-1.0.0's ``-ecdsa-with-SHA256``. Ignored for EdDSA :type hashfunc: callable :param sigencode: function used to encode the signature. The function needs to accept three parameters: the two integers that are the signature and the order of the curve over which the signature was computed. It needs to return an encoded signature. See :func:`~ecdsa.util.sigencode_string` and :func:`~ecdsa.util.sigencode_der` as examples of such functions. Ignored for EdDSA :type sigencode: callable :param int k: a pre-selected nonce for calculating the signature. In typical use cases, it should be set to None (the default) to allow its generation from an entropy source. Ignored for EdDSA. :param bool allow_truncate: if ``True``, the provided digest can have bigger bit-size than the order of the curve, the extra bits (at the end of the digest) will be truncated. Use it when signing SHA-384 output using NIST256p or in similar situations. True by default. Ignored for EdDSA. 
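        A hedged usage sketch; the key and message are created on the
        spot, and ``sigencode_der``/``sigdecode_der`` come from
        :mod:`ecdsa.util`::

            import hashlib
            from ecdsa import SigningKey, NIST256p
            from ecdsa.util import sigencode_der, sigdecode_der

            sk = SigningKey.generate(curve=NIST256p, hashfunc=hashlib.sha256)
            sig = sk.sign(b"message", sigencode=sigencode_der)
            sk.get_verifying_key().verify(
                sig, b"message", sigdecode=sigdecode_der
            )
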
:raises RSZeroError: in the unlikely event when *r* parameter or *s* parameter of the created signature is equal 0, as that would leak the key. Caller should try a better entropy source, retry with different ``k``, or use the :func:`~SigningKey.sign_deterministic` in such case. :return: encoded signature of the hash of `data` :rtype: bytes or sigencode function dependent type """ hashfunc = hashfunc or self.default_hashfunc data = normalise_bytes(data) if isinstance(self.curve.curve, CurveEdTw): return self.sign_deterministic(data) h = hashfunc(data).digest() return self.sign_digest(h, entropy, sigencode, k, allow_truncate) def sign_digest( self, digest, entropy=None, sigencode=sigencode_string, k=None, allow_truncate=False, ): """ Create signature over digest using the probabilistic ECDSA algorithm. This method uses the standard ECDSA algorithm that requires a cryptographically secure random number generator. This method does not hash the input. It's recommended to use the :func:`~SigningKey.sign_digest_deterministic` method instead of this one. :param digest: hash value that will be signed :type digest: :term:`bytes-like object` :param callable entropy: randomness source, os.urandom by default :param sigencode: function used to encode the signature. The function needs to accept three parameters: the two integers that are the signature and the order of the curve over which the signature was computed. It needs to return an encoded signature. See `ecdsa.util.sigencode_string` and `ecdsa.util.sigencode_der` as examples of such functions. :type sigencode: callable :param int k: a pre-selected nonce for calculating the signature. In typical use cases, it should be set to None (the default) to allow its generation from an entropy source. :param bool allow_truncate: if True, the provided digest can have bigger bit-size than the order of the curve, the extra bits (at the end of the digest) will be truncated. Use it when signing SHA-384 output using NIST256p or in similar situations. :raises RSZeroError: in the unlikely event when "r" parameter or "s" parameter of the created signature is equal 0, as that would leak the key. Caller should try a better entropy source, retry with different 'k', or use the :func:`~SigningKey.sign_digest_deterministic` in such case. :return: encoded signature for the `digest` hash :rtype: bytes or sigencode function dependent type """ if isinstance(self.curve.curve, CurveEdTw): raise ValueError("Method unsupported for Edwards curves") digest = normalise_bytes(digest) number = _truncate_and_convert_digest( digest, self.curve, allow_truncate, ) r, s = self.sign_number(number, entropy, k) return sigencode(r, s, self.privkey.order) def sign_number(self, number, entropy=None, k=None): """ Sign an integer directly. Note, this is a low level method, usually you will want to use :func:`~SigningKey.sign_deterministic` or :func:`~SigningKey.sign_digest_deterministic`. :param int number: number to sign using the probabilistic ECDSA algorithm. :param callable entropy: entropy source, os.urandom by default :param int k: pre-selected nonce for signature operation. If unset it will be selected at random using the entropy source. :raises RSZeroError: in the unlikely event when "r" parameter or "s" parameter of the created signature is equal 0, as that would leak the key. Caller should try a better entropy source, retry with different 'k', or use the :func:`~SigningKey.sign_digest_deterministic` in such case. 
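        A hedged, low-level sketch; the integer below is arbitrary and is
        signed directly, without any hashing::

            from ecdsa import SigningKey, NIST256p

            sk = SigningKey.generate(curve=NIST256p)
            r, s = sk.sign_number(0xDEADBEEF)
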
:return: the "r" and "s" parameters of the signature :rtype: tuple of ints """ if isinstance(self.curve.curve, CurveEdTw): raise ValueError("Method unsupported for Edwards curves") order = self.privkey.order if k is not None: _k = k else: _k = randrange(order, entropy) assert 1 <= _k < order sig = self.privkey.sign(number, _k) return sig.r, sig.s ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1649084541.0 ecdsa-0.18.0/src/ecdsa/numbertheory.py0000664005075200507520000004210714222604175017223 0ustar00hkariohkario#! /usr/bin/env python # # Provide some simple capabilities from number theory. # # Version of 2008.11.14. # # Written in 2005 and 2006 by Peter Pearson and placed in the public domain. # Revision history: # 2008.11.14: Use pow(base, exponent, modulus) for modular_exp. # Make gcd and lcm accept arbitrarily many arguments. from __future__ import division import sys from six import integer_types, PY2 from six.moves import reduce try: xrange except NameError: xrange = range try: from gmpy2 import powmod GMPY2 = True GMPY = False except ImportError: GMPY2 = False try: from gmpy import mpz GMPY = True except ImportError: GMPY = False import math import warnings class Error(Exception): """Base class for exceptions in this module.""" pass class JacobiError(Error): pass class SquareRootError(Error): pass class NegativeExponentError(Error): pass def modular_exp(base, exponent, modulus): # pragma: no cover """Raise base to exponent, reducing by modulus""" # deprecated in 0.14 warnings.warn( "Function is unused in library code. If you use this code, " "change to pow() builtin.", DeprecationWarning, ) if exponent < 0: raise NegativeExponentError( "Negative exponents (%d) not allowed" % exponent ) return pow(base, exponent, modulus) def polynomial_reduce_mod(poly, polymod, p): """Reduce poly by polymod, integer arithmetic modulo p. Polynomials are represented as lists of coefficients of increasing powers of x.""" # This module has been tested only by extensive use # in calculating modular square roots. # Just to make this easy, require a monic polynomial: assert polymod[-1] == 1 assert len(polymod) > 1 while len(poly) >= len(polymod): if poly[-1] != 0: for i in xrange(2, len(polymod) + 1): poly[-i] = (poly[-i] - poly[-1] * polymod[-i]) % p poly = poly[0:-1] return poly def polynomial_multiply_mod(m1, m2, polymod, p): """Polynomial multiplication modulo a polynomial over ints mod p. Polynomials are represented as lists of coefficients of increasing powers of x.""" # This is just a seat-of-the-pants implementation. # This module has been tested only by extensive use # in calculating modular square roots. # Initialize the product to zero: prod = (len(m1) + len(m2) - 1) * [0] # Add together all the cross-terms: for i in xrange(len(m1)): for j in xrange(len(m2)): prod[i + j] = (prod[i + j] + m1[i] * m2[j]) % p return polynomial_reduce_mod(prod, polymod, p) def polynomial_exp_mod(base, exponent, polymod, p): """Polynomial exponentiation modulo a polynomial over ints mod p. Polynomials are represented as lists of coefficients of increasing powers of x.""" # Based on the Handbook of Applied Cryptography, algorithm 2.227. # This module has been tested only by extensive use # in calculating modular square roots. 
assert exponent < p if exponent == 0: return [1] G = base k = exponent if k % 2 == 1: s = G else: s = [1] while k > 1: k = k // 2 G = polynomial_multiply_mod(G, G, polymod, p) if k % 2 == 1: s = polynomial_multiply_mod(G, s, polymod, p) return s def jacobi(a, n): """Jacobi symbol""" # Based on the Handbook of Applied Cryptography (HAC), algorithm 2.149. # This function has been tested by comparison with a small # table printed in HAC, and by extensive use in calculating # modular square roots. if not n >= 3: raise JacobiError("n must be larger than 2") if not n % 2 == 1: raise JacobiError("n must be odd") a = a % n if a == 0: return 0 if a == 1: return 1 a1, e = a, 0 while a1 % 2 == 0: a1, e = a1 // 2, e + 1 if e % 2 == 0 or n % 8 == 1 or n % 8 == 7: s = 1 else: s = -1 if a1 == 1: return s if n % 4 == 3 and a1 % 4 == 3: s = -s return s * jacobi(n % a1, a1) def square_root_mod_prime(a, p): """Modular square root of a, mod p, p prime.""" # Based on the Handbook of Applied Cryptography, algorithms 3.34 to 3.39. # This module has been tested for all values in [0,p-1] for # every prime p from 3 to 1229. assert 0 <= a < p assert 1 < p if a == 0: return 0 if p == 2: return a jac = jacobi(a, p) if jac == -1: raise SquareRootError("%d has no square root modulo %d" % (a, p)) if p % 4 == 3: return pow(a, (p + 1) // 4, p) if p % 8 == 5: d = pow(a, (p - 1) // 4, p) if d == 1: return pow(a, (p + 3) // 8, p) assert d == p - 1 return (2 * a * pow(4 * a, (p - 5) // 8, p)) % p if PY2: # xrange on python2 can take integers representable as C long only range_top = min(0x7FFFFFFF, p) else: range_top = p for b in xrange(2, range_top): if jacobi(b * b - 4 * a, p) == -1: f = (a, -b, 1) ff = polynomial_exp_mod((0, 1), (p + 1) // 2, f, p) if ff[1]: raise SquareRootError("p is not prime") return ff[0] raise RuntimeError("No b found.") # because all the inverse_mod code is arch/environment specific, and coveralls # expects it to execute equal number of times, we need to waive it by # adding the "no branch" pragma to all branches if GMPY2: # pragma: no branch def inverse_mod(a, m): """Inverse of a mod m.""" if a == 0: # pragma: no branch return 0 return powmod(a, -1, m) elif GMPY: # pragma: no branch def inverse_mod(a, m): """Inverse of a mod m.""" # while libgmp does support inverses modulo, it is accessible # only using the native `pow()` function, and `pow()` in gmpy sanity # checks the parameters before passing them on to underlying # implementation if a == 0: # pragma: no branch return 0 a = mpz(a) m = mpz(m) lm, hm = mpz(1), mpz(0) low, high = a % m, m while low > 1: # pragma: no branch r = high // low lm, low, hm, high = hm - lm * r, high - low * r, lm, low return lm % m elif sys.version_info >= (3, 8): # pragma: no branch def inverse_mod(a, m): """Inverse of a mod m.""" if a == 0: # pragma: no branch return 0 return pow(a, -1, m) else: # pragma: no branch def inverse_mod(a, m): """Inverse of a mod m.""" if a == 0: # pragma: no branch return 0 lm, hm = 1, 0 low, high = a % m, m while low > 1: # pragma: no branch r = high // low lm, low, hm, high = hm - lm * r, high - low * r, lm, low return lm % m try: gcd2 = math.gcd except AttributeError: def gcd2(a, b): """Greatest common divisor using Euclid's algorithm.""" while a: a, b = b % a, a return b def gcd(*a): """Greatest common divisor. 
Usage: gcd([ 2, 4, 6 ]) or: gcd(2, 4, 6) """ if len(a) > 1: return reduce(gcd2, a) if hasattr(a[0], "__iter__"): return reduce(gcd2, a[0]) return a[0] def lcm2(a, b): """Least common multiple of two integers.""" return (a * b) // gcd(a, b) def lcm(*a): """Least common multiple. Usage: lcm([ 3, 4, 5 ]) or: lcm(3, 4, 5) """ if len(a) > 1: return reduce(lcm2, a) if hasattr(a[0], "__iter__"): return reduce(lcm2, a[0]) return a[0] def factorization(n): """Decompose n into a list of (prime,exponent) pairs.""" assert isinstance(n, integer_types) if n < 2: return [] result = [] # Test the small primes: for d in smallprimes: if d > n: break q, r = divmod(n, d) if r == 0: count = 1 while d <= n: n = q q, r = divmod(n, d) if r != 0: break count = count + 1 result.append((d, count)) # If n is still greater than the last of our small primes, # it may require further work: if n > smallprimes[-1]: if is_prime(n): # If what's left is prime, it's easy: result.append((n, 1)) else: # Ugh. Search stupidly for a divisor: d = smallprimes[-1] while 1: d = d + 2 # Try the next divisor. q, r = divmod(n, d) if q < d: # n < d*d means we're done, n = 1 or prime. break if r == 0: # d divides n. How many times? count = 1 n = q while d <= n: # As long as d might still divide n, q, r = divmod(n, d) # see if it does. if r != 0: break n = q # It does. Reduce n, increase count. count = count + 1 result.append((d, count)) if n > 1: result.append((n, 1)) return result def phi(n): # pragma: no cover """Return the Euler totient function of n.""" # deprecated in 0.14 warnings.warn( "Function is unused by library code. If you use this code, " "please open an issue in " "https://github.com/tlsfuzzer/python-ecdsa", DeprecationWarning, ) assert isinstance(n, integer_types) if n < 3: return 1 result = 1 ff = factorization(n) for f in ff: e = f[1] if e > 1: result = result * f[0] ** (e - 1) * (f[0] - 1) else: result = result * (f[0] - 1) return result def carmichael(n): # pragma: no cover """Return Carmichael function of n. Carmichael(n) is the smallest integer x such that m**x = 1 mod n for all m relatively prime to n. """ # deprecated in 0.14 warnings.warn( "Function is unused by library code. If you use this code, " "please open an issue in " "https://github.com/tlsfuzzer/python-ecdsa", DeprecationWarning, ) return carmichael_of_factorized(factorization(n)) def carmichael_of_factorized(f_list): # pragma: no cover """Return the Carmichael function of a number that is represented as a list of (prime,exponent) pairs. """ # deprecated in 0.14 warnings.warn( "Function is unused by library code. If you use this code, " "please open an issue in " "https://github.com/tlsfuzzer/python-ecdsa", DeprecationWarning, ) if len(f_list) < 1: return 1 result = carmichael_of_ppower(f_list[0]) for i in xrange(1, len(f_list)): result = lcm(result, carmichael_of_ppower(f_list[i])) return result def carmichael_of_ppower(pp): # pragma: no cover """Carmichael function of the given power of the given prime.""" # deprecated in 0.14 warnings.warn( "Function is unused by library code. If you use this code, " "please open an issue in " "https://github.com/tlsfuzzer/python-ecdsa", DeprecationWarning, ) p, a = pp if p == 2 and a > 2: return 2 ** (a - 2) else: return (p - 1) * p ** (a - 1) def order_mod(x, m): # pragma: no cover """Return the order of x in the multiplicative group mod m.""" # deprecated in 0.14 warnings.warn( "Function is unused by library code. 
If you use this code, " "please open an issue in " "https://github.com/tlsfuzzer/python-ecdsa", DeprecationWarning, ) # Warning: this implementation is not very clever, and will # take a long time if m is very large. if m <= 1: return 0 assert gcd(x, m) == 1 z = x result = 1 while z != 1: z = (z * x) % m result = result + 1 return result def largest_factor_relatively_prime(a, b): # pragma: no cover """Return the largest factor of a relatively prime to b.""" # deprecated in 0.14 warnings.warn( "Function is unused by library code. If you use this code, " "please open an issue in " "https://github.com/tlsfuzzer/python-ecdsa", DeprecationWarning, ) while 1: d = gcd(a, b) if d <= 1: break b = d while 1: q, r = divmod(a, d) if r > 0: break a = q return a def kinda_order_mod(x, m): # pragma: no cover """Return the order of x in the multiplicative group mod m', where m' is the largest factor of m relatively prime to x. """ # deprecated in 0.14 warnings.warn( "Function is unused by library code. If you use this code, " "please open an issue in " "https://github.com/tlsfuzzer/python-ecdsa", DeprecationWarning, ) return order_mod(x, largest_factor_relatively_prime(m, x)) def is_prime(n): """Return True if x is prime, False otherwise. We use the Miller-Rabin test, as given in Menezes et al. p. 138. This test is not exact: there are composite values n for which it returns True. In testing the odd numbers from 10000001 to 19999999, about 66 composites got past the first test, 5 got past the second test, and none got past the third. Since factors of 2, 3, 5, 7, and 11 were detected during preliminary screening, the number of numbers tested by Miller-Rabin was (19999999 - 10000001)*(2/3)*(4/5)*(6/7) = 4.57 million. """ # (This is used to study the risk of false positives:) global miller_rabin_test_count miller_rabin_test_count = 0 if n <= smallprimes[-1]: if n in smallprimes: return True else: return False if gcd(n, 2 * 3 * 5 * 7 * 11) != 1: return False # Choose a number of iterations sufficient to reduce the # probability of accepting a composite below 2**-80 # (from Menezes et al. 
Table 4.4): t = 40 n_bits = 1 + int(math.log(n, 2)) for k, tt in ( (100, 27), (150, 18), (200, 15), (250, 12), (300, 9), (350, 8), (400, 7), (450, 6), (550, 5), (650, 4), (850, 3), (1300, 2), ): if n_bits < k: break t = tt # Run the test t times: s = 0 r = n - 1 while (r % 2) == 0: s = s + 1 r = r // 2 for i in xrange(t): a = smallprimes[i] y = pow(a, r, n) if y != 1 and y != n - 1: j = 1 while j <= s - 1 and y != n - 1: y = pow(y, 2, n) if y == 1: miller_rabin_test_count = i + 1 return False j = j + 1 if y != n - 1: miller_rabin_test_count = i + 1 return False return True def next_prime(starting_value): """Return the smallest prime larger than the starting value.""" if starting_value < 2: return 2 result = (starting_value + 1) | 1 while not is_prime(result): result = result + 2 return result smallprimes = [ 2, 3, 5, 7, 11, 13, 17, 19, 23, 29, 31, 37, 41, 43, 47, 53, 59, 61, 67, 71, 73, 79, 83, 89, 97, 101, 103, 107, 109, 113, 127, 131, 137, 139, 149, 151, 157, 163, 167, 173, 179, 181, 191, 193, 197, 199, 211, 223, 227, 229, 233, 239, 241, 251, 257, 263, 269, 271, 277, 281, 283, 293, 307, 311, 313, 317, 331, 337, 347, 349, 353, 359, 367, 373, 379, 383, 389, 397, 401, 409, 419, 421, 431, 433, 439, 443, 449, 457, 461, 463, 467, 479, 487, 491, 499, 503, 509, 521, 523, 541, 547, 557, 563, 569, 571, 577, 587, 593, 599, 601, 607, 613, 617, 619, 631, 641, 643, 647, 653, 659, 661, 673, 677, 683, 691, 701, 709, 719, 727, 733, 739, 743, 751, 757, 761, 769, 773, 787, 797, 809, 811, 821, 823, 827, 829, 839, 853, 857, 859, 863, 877, 881, 883, 887, 907, 911, 919, 929, 937, 941, 947, 953, 967, 971, 977, 983, 991, 997, 1009, 1013, 1019, 1021, 1031, 1033, 1039, 1049, 1051, 1061, 1063, 1069, 1087, 1091, 1093, 1097, 1103, 1109, 1117, 1123, 1129, 1151, 1153, 1163, 1171, 1181, 1187, 1193, 1201, 1213, 1217, 1223, 1229, ] miller_rabin_test_count = 0 ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1649084541.0 ecdsa-0.18.0/src/ecdsa/rfc6979.py0000664005075200507520000000544214222604175015612 0ustar00hkariohkario""" RFC 6979: Deterministic Usage of the Digital Signature Algorithm (DSA) and Elliptic Curve Digital Signature Algorithm (ECDSA) http://tools.ietf.org/html/rfc6979 Many thanks to Coda Hale for his implementation in Go language: https://github.com/codahale/rfc6979 """ import hmac from binascii import hexlify from .util import number_to_string, number_to_string_crop, bit_length from ._compat import hmac_compat # bit_length was defined in this module previously so keep it for backwards # compatibility, will need to deprecate and remove it later __all__ = ["bit_length", "bits2int", "bits2octets", "generate_k"] def bits2int(data, qlen): x = int(hexlify(data), 16) l = len(data) * 8 if l > qlen: return x >> (l - qlen) return x def bits2octets(data, order): z1 = bits2int(data, bit_length(order)) z2 = z1 - order if z2 < 0: z2 = z1 return number_to_string_crop(z2, order) # https://tools.ietf.org/html/rfc6979#section-3.2 def generate_k(order, secexp, hash_func, data, retry_gen=0, extra_entropy=b""): """ Generate the ``k`` value - the nonce for DSA. 
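    The construction follows RFC 6979 section 3.2: an HMAC-based generator
    is seeded with the private key and the hash of the message, and
    candidate values are drawn from it until one falls in the range
    [1, order-1].

    Informal usage sketch (the names below are illustrative only)::

        generate_k(curve.order, secret_exponent, hashlib.sha256,
                   hashlib.sha256(message).digest())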
:param int order: order of the DSA generator used in the signature :param int secexp: secure exponent (private key) in numeric form :param hash_func: reference to the same hash function used for generating hash, like :py:class:`hashlib.sha1` :param bytes data: hash in binary form of the signing data :param int retry_gen: how many good 'k' values to skip before returning :param bytes extra_entropy: additional added data in binary form as per section-3.6 of rfc6979 :rtype: int """ qlen = bit_length(order) holen = hash_func().digest_size rolen = (qlen + 7) // 8 bx = ( hmac_compat(number_to_string(secexp, order)), hmac_compat(bits2octets(data, order)), hmac_compat(extra_entropy), ) # Step B v = b"\x01" * holen # Step C k = b"\x00" * holen # Step D k = hmac.new(k, digestmod=hash_func) k.update(v + b"\x00") for i in bx: k.update(i) k = k.digest() # Step E v = hmac.new(k, v, hash_func).digest() # Step F k = hmac.new(k, digestmod=hash_func) k.update(v + b"\x01") for i in bx: k.update(i) k = k.digest() # Step G v = hmac.new(k, v, hash_func).digest() # Step H while True: # Step H1 t = b"" # Step H2 while len(t) < rolen: v = hmac.new(k, v, hash_func).digest() t += v # Step H3 secret = bits2int(t, qlen) if 1 <= secret < order: if retry_gen <= 0: return secret retry_gen -= 1 k = hmac.new(k, v + b"\x00", hash_func).digest() v = hmac.new(k, v, hash_func).digest() ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1654868667.0 ecdsa-0.18.0/src/ecdsa/test_curves.py0000664005075200507520000003143114250645273017051 0ustar00hkariohkariotry: import unittest2 as unittest except ImportError: import unittest import base64 import pytest from .curves import ( Curve, NIST256p, curves, UnknownCurveError, PRIME_FIELD_OID, curve_by_name, ) from .ellipticcurve import CurveFp, PointJacobi, CurveEdTw from . 
import der from .util import number_to_string class TestParameterEncoding(unittest.TestCase): @classmethod def setUpClass(cls): # minimal, but with cofactor (excludes seed when compared to # OpenSSL output) cls.base64_params = ( "MIHgAgEBMCwGByqGSM49AQECIQD/////AAAAAQAAAAAAAAAAAAAAAP/////////" "//////zBEBCD/////AAAAAQAAAAAAAAAAAAAAAP///////////////AQgWsY12K" "o6k+ez671VdpiGvGUdBrDMU7D2O848PifSYEsEQQRrF9Hy4SxCR/i85uVjpEDyd" "wN9gS3rM6D0oTlF2JjClk/jQuL+Gn+bjufrSnwPnhYrzjNXazFezsu2QGg3v1H1" "AiEA/////wAAAAD//////////7zm+q2nF56E87nKwvxjJVECAQE=" ) def test_from_pem(self): pem_params = ( "-----BEGIN EC PARAMETERS-----\n" "MIHgAgEBMCwGByqGSM49AQECIQD/////AAAAAQAAAAAAAAAAAAAAAP/////////\n" "//////zBEBCD/////AAAAAQAAAAAAAAAAAAAAAP///////////////AQgWsY12K\n" "o6k+ez671VdpiGvGUdBrDMU7D2O848PifSYEsEQQRrF9Hy4SxCR/i85uVjpEDyd\n" "wN9gS3rM6D0oTlF2JjClk/jQuL+Gn+bjufrSnwPnhYrzjNXazFezsu2QGg3v1H1\n" "AiEA/////wAAAAD//////////7zm+q2nF56E87nKwvxjJVECAQE=\n" "-----END EC PARAMETERS-----\n" ) curve = Curve.from_pem(pem_params) self.assertIs(curve, NIST256p) def test_from_pem_with_explicit_when_explicit_disabled(self): pem_params = ( "-----BEGIN EC PARAMETERS-----\n" "MIHgAgEBMCwGByqGSM49AQECIQD/////AAAAAQAAAAAAAAAAAAAAAP/////////\n" "//////zBEBCD/////AAAAAQAAAAAAAAAAAAAAAP///////////////AQgWsY12K\n" "o6k+ez671VdpiGvGUdBrDMU7D2O848PifSYEsEQQRrF9Hy4SxCR/i85uVjpEDyd\n" "wN9gS3rM6D0oTlF2JjClk/jQuL+Gn+bjufrSnwPnhYrzjNXazFezsu2QGg3v1H1\n" "AiEA/////wAAAAD//////////7zm+q2nF56E87nKwvxjJVECAQE=\n" "-----END EC PARAMETERS-----\n" ) with self.assertRaises(der.UnexpectedDER) as e: Curve.from_pem(pem_params, ["named_curve"]) self.assertIn("explicit curve parameters not", str(e.exception)) def test_from_pem_with_named_curve_with_named_curve_disabled(self): pem_params = ( "-----BEGIN EC PARAMETERS-----\n" "BggqhkjOPQMBBw==\n" "-----END EC PARAMETERS-----\n" ) with self.assertRaises(der.UnexpectedDER) as e: Curve.from_pem(pem_params, ["explicit"]) self.assertIn("named_curve curve parameters not", str(e.exception)) def test_from_pem_with_wrong_header(self): pem_params = ( "-----BEGIN PARAMETERS-----\n" "MIHgAgEBMCwGByqGSM49AQECIQD/////AAAAAQAAAAAAAAAAAAAAAP/////////\n" "//////zBEBCD/////AAAAAQAAAAAAAAAAAAAAAP///////////////AQgWsY12K\n" "o6k+ez671VdpiGvGUdBrDMU7D2O848PifSYEsEQQRrF9Hy4SxCR/i85uVjpEDyd\n" "wN9gS3rM6D0oTlF2JjClk/jQuL+Gn+bjufrSnwPnhYrzjNXazFezsu2QGg3v1H1\n" "AiEA/////wAAAAD//////////7zm+q2nF56E87nKwvxjJVECAQE=\n" "-----END PARAMETERS-----\n" ) with self.assertRaises(der.UnexpectedDER) as e: Curve.from_pem(pem_params) self.assertIn("PARAMETERS PEM header", str(e.exception)) def test_to_pem(self): pem_params = ( b"-----BEGIN EC PARAMETERS-----\n" b"BggqhkjOPQMBBw==\n" b"-----END EC PARAMETERS-----\n" ) encoding = NIST256p.to_pem() self.assertEqual(pem_params, encoding) def test_compare_with_different_object(self): self.assertNotEqual(NIST256p, 256) def test_named_curve_params_der(self): encoded = NIST256p.to_der() # just the encoding of the NIST256p OID (prime256v1) self.assertEqual(b"\x06\x08\x2a\x86\x48\xce\x3d\x03\x01\x07", encoded) def test_verify_that_default_is_named_curve_der(self): encoded_default = NIST256p.to_der() encoded_named = NIST256p.to_der("named_curve") self.assertEqual(encoded_default, encoded_named) def test_encoding_to_explicit_params(self): encoded = NIST256p.to_der("explicit") self.assertEqual(encoded, bytes(base64.b64decode(self.base64_params))) def test_encoding_to_unsupported_type(self): with self.assertRaises(ValueError) as e: NIST256p.to_der("unsupported") self.assertIn("Only 
'named_curve'", str(e.exception)) def test_encoding_to_explicit_compressed_params(self): encoded = NIST256p.to_der("explicit", "compressed") compressed_base_point = ( "MIHAAgEBMCwGByqGSM49AQECIQD/////AAAAAQAAAAAAAAAAAAAAAP//////////" "/////zBEBCD/////AAAAAQAAAAAAAAAAAAAAAP///////////////AQgWsY12Ko6" "k+ez671VdpiGvGUdBrDMU7D2O848PifSYEsEIQNrF9Hy4SxCR/i85uVjpEDydwN9" "gS3rM6D0oTlF2JjClgIhAP////8AAAAA//////////+85vqtpxeehPO5ysL8YyVR" "AgEB" ) self.assertEqual( encoded, bytes(base64.b64decode(compressed_base_point)) ) def test_decoding_explicit_from_openssl(self): # generated with openssl 1.1.1k using # openssl ecparam -name P-256 -param_enc explicit -out /tmp/file.pem p256_explicit = ( "MIH3AgEBMCwGByqGSM49AQECIQD/////AAAAAQAAAAAAAAAAAAAAAP//////////" "/////zBbBCD/////AAAAAQAAAAAAAAAAAAAAAP///////////////AQgWsY12Ko6" "k+ez671VdpiGvGUdBrDMU7D2O848PifSYEsDFQDEnTYIhucEk2pmeOETnSa3gZ9+" "kARBBGsX0fLhLEJH+Lzm5WOkQPJ3A32BLeszoPShOUXYmMKWT+NC4v4af5uO5+tK" "fA+eFivOM1drMV7Oy7ZAaDe/UfUCIQD/////AAAAAP//////////vOb6racXnoTz" "ucrC/GMlUQIBAQ==" ) decoded = Curve.from_der(bytes(base64.b64decode(p256_explicit))) self.assertEqual(NIST256p, decoded) def test_decoding_well_known_from_explicit_params(self): curve = Curve.from_der(bytes(base64.b64decode(self.base64_params))) self.assertIs(curve, NIST256p) def test_decoding_with_incorrect_valid_encodings(self): with self.assertRaises(ValueError) as e: Curve.from_der(b"", ["explicitCA"]) self.assertIn("Only named_curve", str(e.exception)) def test_compare_curves_with_different_generators(self): curve_fp = CurveFp(23, 1, 7) base_a = PointJacobi(curve_fp, 13, 3, 1, 9, generator=True) base_b = PointJacobi(curve_fp, 1, 20, 1, 9, generator=True) curve_a = Curve("unknown", curve_fp, base_a, None) curve_b = Curve("unknown", curve_fp, base_b, None) self.assertNotEqual(curve_a, curve_b) def test_default_encode_for_custom_curve(self): curve_fp = CurveFp(23, 1, 7) base_point = PointJacobi(curve_fp, 13, 3, 1, 9, generator=True) curve = Curve("unknown", curve_fp, base_point, None) encoded = curve.to_der() decoded = Curve.from_der(encoded) self.assertEqual(curve, decoded) expected = "MCECAQEwDAYHKoZIzj0BAQIBFzAGBAEBBAEHBAMEDQMCAQk=" self.assertEqual(encoded, bytes(base64.b64decode(expected))) def test_named_curve_encode_for_custom_curve(self): curve_fp = CurveFp(23, 1, 7) base_point = PointJacobi(curve_fp, 13, 3, 1, 9, generator=True) curve = Curve("unknown", curve_fp, base_point, None) with self.assertRaises(UnknownCurveError) as e: curve.to_der("named_curve") self.assertIn("Can't encode curve", str(e.exception)) def test_try_decoding_binary_explicit(self): sect113r1_explicit = ( "MIGRAgEBMBwGByqGSM49AQIwEQIBcQYJKoZIzj0BAgMCAgEJMDkEDwAwiCUMpufH" "/mSc6Fgg9wQPAOi+5NPiJgdEGIvg6ccjAxUAEOcjqxTWluZ2h1YVF1b+v4/LSakE" "HwQAnXNhbzX0qxQH1zViwQ8ApSgwJ3lY7oTRMV7TGIYCDwEAAAAAAAAA2czsijnl" "bwIBAg==" ) with self.assertRaises(UnknownCurveError) as e: Curve.from_der(base64.b64decode(sect113r1_explicit)) self.assertIn("Characteristic 2 curves unsupported", str(e.exception)) def test_decode_malformed_named_curve(self): bad_der = der.encode_oid(*NIST256p.oid) + der.encode_integer(1) with self.assertRaises(der.UnexpectedDER) as e: Curve.from_der(bad_der) self.assertIn("Unexpected data after OID", str(e.exception)) def test_decode_malformed_explicit_garbage_after_ECParam(self): bad_der = bytes( base64.b64decode(self.base64_params) ) + der.encode_integer(1) with self.assertRaises(der.UnexpectedDER) as e: Curve.from_der(bad_der) self.assertIn("Unexpected data after ECParameters", str(e.exception)) 
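    # Note: the explicit ECParameters ``version`` field is 1 (ecpVer1 in
    # SEC 1 / RFC 3279); the decoder is expected to reject other values,
    # as the following test checks with a version of 2.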
def test_decode_malformed_unknown_version_number(self): bad_der = der.encode_sequence(der.encode_integer(2)) with self.assertRaises(der.UnexpectedDER) as e: Curve.from_der(bad_der) self.assertIn("Unknown parameter encoding format", str(e.exception)) def test_decode_malformed_unknown_field_type(self): curve_p = NIST256p.curve.p() bad_der = der.encode_sequence( der.encode_integer(1), der.encode_sequence( der.encode_oid(1, 2, 3), der.encode_integer(curve_p) ), der.encode_sequence( der.encode_octet_string( number_to_string(NIST256p.curve.a() % curve_p, curve_p) ), der.encode_octet_string( number_to_string(NIST256p.curve.b(), curve_p) ), ), der.encode_octet_string( NIST256p.generator.to_bytes("uncompressed") ), der.encode_integer(NIST256p.generator.order()), ) with self.assertRaises(UnknownCurveError) as e: Curve.from_der(bad_der) self.assertIn("Unknown field type: (1, 2, 3)", str(e.exception)) def test_decode_malformed_garbage_after_prime(self): curve_p = NIST256p.curve.p() bad_der = der.encode_sequence( der.encode_integer(1), der.encode_sequence( der.encode_oid(*PRIME_FIELD_OID), der.encode_integer(curve_p), der.encode_integer(1), ), der.encode_sequence( der.encode_octet_string( number_to_string(NIST256p.curve.a() % curve_p, curve_p) ), der.encode_octet_string( number_to_string(NIST256p.curve.b(), curve_p) ), ), der.encode_octet_string( NIST256p.generator.to_bytes("uncompressed") ), der.encode_integer(NIST256p.generator.order()), ) with self.assertRaises(der.UnexpectedDER) as e: Curve.from_der(bad_der) self.assertIn("Prime-p element", str(e.exception)) class TestCurveSearching(unittest.TestCase): def test_correct_name(self): c = curve_by_name("NIST256p") self.assertIs(c, NIST256p) def test_openssl_name(self): c = curve_by_name("prime256v1") self.assertIs(c, NIST256p) def test_unknown_curve(self): with self.assertRaises(UnknownCurveError) as e: curve_by_name("foo bar") self.assertIn( "name 'foo bar' unknown, only curves supported: " "['NIST192p', 'NIST224p'", str(e.exception), ) def test_with_None_as_parameter(self): with self.assertRaises(UnknownCurveError) as e: curve_by_name(None) self.assertIn( "name None unknown, only curves supported: " "['NIST192p', 'NIST224p'", str(e.exception), ) @pytest.mark.parametrize("curve", curves, ids=[i.name for i in curves]) def test_curve_params_encode_decode_named(curve): ret = Curve.from_der(curve.to_der("named_curve")) assert curve == ret @pytest.mark.parametrize("curve", curves, ids=[i.name for i in curves]) def test_curve_params_encode_decode_explicit(curve): if isinstance(curve.curve, CurveEdTw): with pytest.raises(UnknownCurveError): curve.to_der("explicit") else: ret = Curve.from_der(curve.to_der("explicit")) assert curve == ret @pytest.mark.parametrize("curve", curves, ids=[i.name for i in curves]) def test_curve_params_encode_decode_default(curve): ret = Curve.from_der(curve.to_der()) assert curve == ret @pytest.mark.parametrize("curve", curves, ids=[i.name for i in curves]) def test_curve_params_encode_decode_explicit_compressed(curve): if isinstance(curve.curve, CurveEdTw): with pytest.raises(UnknownCurveError): curve.to_der("explicit", "compressed") else: ret = Curve.from_der(curve.to_der("explicit", "compressed")) assert curve == ret ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1648836739.0 ecdsa-0.18.0/src/ecdsa/test_der.py0000664005075200507520000003515414221640203016304 0ustar00hkariohkario# compatibility with Python 2.6, for that we need unittest2 package, # which is not available on 3.3 or 3.4 import 
warnings from binascii import hexlify try: import unittest2 as unittest except ImportError: import unittest from six import b import hypothesis.strategies as st from hypothesis import given import pytest from ._compat import str_idx_as_int from .curves import NIST256p, NIST224p from .der import ( remove_integer, UnexpectedDER, read_length, encode_bitstring, remove_bitstring, remove_object, encode_oid, remove_constructed, remove_octet_string, remove_sequence, ) class TestRemoveInteger(unittest.TestCase): # DER requires the integers to be 0-padded only if they would be # interpreted as negative, check if those errors are detected def test_non_minimal_encoding(self): with self.assertRaises(UnexpectedDER): remove_integer(b("\x02\x02\x00\x01")) def test_negative_with_high_bit_set(self): with self.assertRaises(UnexpectedDER): remove_integer(b("\x02\x01\x80")) def test_minimal_with_high_bit_set(self): val, rem = remove_integer(b("\x02\x02\x00\x80")) self.assertEqual(val, 0x80) self.assertEqual(rem, b"") def test_two_zero_bytes_with_high_bit_set(self): with self.assertRaises(UnexpectedDER): remove_integer(b("\x02\x03\x00\x00\xff")) def test_zero_length_integer(self): with self.assertRaises(UnexpectedDER): remove_integer(b("\x02\x00")) def test_empty_string(self): with self.assertRaises(UnexpectedDER): remove_integer(b("")) def test_encoding_of_zero(self): val, rem = remove_integer(b("\x02\x01\x00")) self.assertEqual(val, 0) self.assertEqual(rem, b"") def test_encoding_of_127(self): val, rem = remove_integer(b("\x02\x01\x7f")) self.assertEqual(val, 127) self.assertEqual(rem, b"") def test_encoding_of_128(self): val, rem = remove_integer(b("\x02\x02\x00\x80")) self.assertEqual(val, 128) self.assertEqual(rem, b"") def test_wrong_tag(self): with self.assertRaises(UnexpectedDER) as e: remove_integer(b"\x01\x02\x00\x80") self.assertIn("wanted type 'integer'", str(e.exception)) def test_wrong_length(self): with self.assertRaises(UnexpectedDER) as e: remove_integer(b"\x02\x03\x00\x80") self.assertIn("Length longer", str(e.exception)) class TestReadLength(unittest.TestCase): # DER requires the lengths between 0 and 127 to be encoded using the short # form and lengths above that encoded with minimal number of bytes # necessary def test_zero_length(self): self.assertEqual((0, 1), read_length(b("\x00"))) def test_two_byte_zero_length(self): with self.assertRaises(UnexpectedDER): read_length(b("\x81\x00")) def test_two_byte_small_length(self): with self.assertRaises(UnexpectedDER): read_length(b("\x81\x7f")) def test_long_form_with_zero_length(self): with self.assertRaises(UnexpectedDER): read_length(b("\x80")) def test_smallest_two_byte_length(self): self.assertEqual((128, 2), read_length(b("\x81\x80"))) def test_zero_padded_length(self): with self.assertRaises(UnexpectedDER): read_length(b("\x82\x00\x80")) def test_two_three_byte_length(self): self.assertEqual((256, 3), read_length(b"\x82\x01\x00")) def test_empty_string(self): with self.assertRaises(UnexpectedDER): read_length(b("")) def test_length_overflow(self): with self.assertRaises(UnexpectedDER): read_length(b("\x83\x01\x00")) class TestEncodeBitstring(unittest.TestCase): # DER requires BIT STRINGS to include a number of padding bits in the # encoded byte string, that padding must be between 0 and 7 def test_old_call_convention(self): """This is the old way to use the function.""" warnings.simplefilter("always") with pytest.warns(DeprecationWarning) as warns: der = encode_bitstring(b"\x00\xff") self.assertEqual(len(warns), 1) self.assertIn( "unused= 
needs to be specified", warns[0].message.args[0] ) self.assertEqual(der, b"\x03\x02\x00\xff") def test_new_call_convention(self): """This is how it should be called now.""" warnings.simplefilter("always") with pytest.warns(None) as warns: der = encode_bitstring(b"\xff", 0) # verify that new call convention doesn't raise Warnings self.assertEqual(len(warns), 0) self.assertEqual(der, b"\x03\x02\x00\xff") def test_implicit_unused_bits(self): """ Writing bit string with already included the number of unused bits. """ warnings.simplefilter("always") with pytest.warns(None) as warns: der = encode_bitstring(b"\x00\xff", None) # verify that new call convention doesn't raise Warnings self.assertEqual(len(warns), 0) self.assertEqual(der, b"\x03\x02\x00\xff") def test_explicit_unused_bits(self): der = encode_bitstring(b"\xff\xf0", 4) self.assertEqual(der, b"\x03\x03\x04\xff\xf0") def test_empty_string(self): self.assertEqual(encode_bitstring(b"", 0), b"\x03\x01\x00") def test_invalid_unused_count(self): with self.assertRaises(ValueError): encode_bitstring(b"\xff\x00", 8) def test_invalid_unused_with_empty_string(self): with self.assertRaises(ValueError): encode_bitstring(b"", 1) def test_non_zero_padding_bits(self): with self.assertRaises(ValueError): encode_bitstring(b"\xff", 2) class TestRemoveBitstring(unittest.TestCase): def test_old_call_convention(self): """This is the old way to call the function.""" warnings.simplefilter("always") with pytest.warns(DeprecationWarning) as warns: bits, rest = remove_bitstring(b"\x03\x02\x00\xff") self.assertEqual(len(warns), 1) self.assertIn( "expect_unused= needs to be specified", warns[0].message.args[0] ) self.assertEqual(bits, b"\x00\xff") self.assertEqual(rest, b"") def test_new_call_convention(self): warnings.simplefilter("always") with pytest.warns(None) as warns: bits, rest = remove_bitstring(b"\x03\x02\x00\xff", 0) self.assertEqual(len(warns), 0) self.assertEqual(bits, b"\xff") self.assertEqual(rest, b"") def test_implicit_unexpected_unused(self): warnings.simplefilter("always") with pytest.warns(None) as warns: bits, rest = remove_bitstring(b"\x03\x02\x00\xff", None) self.assertEqual(len(warns), 0) self.assertEqual(bits, (b"\xff", 0)) self.assertEqual(rest, b"") def test_with_padding(self): ret, rest = remove_bitstring(b"\x03\x02\x04\xf0", None) self.assertEqual(ret, (b"\xf0", 4)) self.assertEqual(rest, b"") def test_not_a_bitstring(self): with self.assertRaises(UnexpectedDER): remove_bitstring(b"\x02\x02\x00\xff", None) def test_empty_encoding(self): with self.assertRaises(UnexpectedDER): remove_bitstring(b"\x03\x00", None) def test_empty_string(self): with self.assertRaises(UnexpectedDER): remove_bitstring(b"", None) def test_no_length(self): with self.assertRaises(UnexpectedDER): remove_bitstring(b"\x03", None) def test_unexpected_number_of_unused_bits(self): with self.assertRaises(UnexpectedDER): remove_bitstring(b"\x03\x02\x00\xff", 1) def test_invalid_encoding_of_unused_bits(self): with self.assertRaises(UnexpectedDER): remove_bitstring(b"\x03\x03\x08\xff\x00", None) def test_invalid_encoding_of_empty_string(self): with self.assertRaises(UnexpectedDER): remove_bitstring(b"\x03\x01\x01", None) def test_invalid_padding_bits(self): with self.assertRaises(UnexpectedDER): remove_bitstring(b"\x03\x02\x01\xff", None) class TestStrIdxAsInt(unittest.TestCase): def test_str(self): self.assertEqual(115, str_idx_as_int("str", 0)) def test_bytes(self): self.assertEqual(115, str_idx_as_int(b"str", 0)) def test_bytearray(self): self.assertEqual(115, 
str_idx_as_int(bytearray(b"str"), 0)) class TestEncodeOid(unittest.TestCase): def test_pub_key_oid(self): oid_ecPublicKey = encode_oid(1, 2, 840, 10045, 2, 1) self.assertEqual(hexlify(oid_ecPublicKey), b("06072a8648ce3d0201")) def test_nist224p_oid(self): self.assertEqual(hexlify(NIST224p.encoded_oid), b("06052b81040021")) def test_nist256p_oid(self): self.assertEqual( hexlify(NIST256p.encoded_oid), b"06082a8648ce3d030107" ) def test_large_second_subid(self): # from X.690, section 8.19.5 oid = encode_oid(2, 999, 3) self.assertEqual(oid, b"\x06\x03\x88\x37\x03") def test_with_two_subids(self): oid = encode_oid(2, 999) self.assertEqual(oid, b"\x06\x02\x88\x37") def test_zero_zero(self): oid = encode_oid(0, 0) self.assertEqual(oid, b"\x06\x01\x00") def test_with_wrong_types(self): with self.assertRaises((TypeError, AssertionError)): encode_oid(0, None) def test_with_small_first_large_second(self): with self.assertRaises(AssertionError): encode_oid(1, 40) def test_small_first_max_second(self): oid = encode_oid(1, 39) self.assertEqual(oid, b"\x06\x01\x4f") def test_with_invalid_first(self): with self.assertRaises(AssertionError): encode_oid(3, 39) class TestRemoveObject(unittest.TestCase): @classmethod def setUpClass(cls): cls.oid_ecPublicKey = encode_oid(1, 2, 840, 10045, 2, 1) def test_pub_key_oid(self): oid, rest = remove_object(self.oid_ecPublicKey) self.assertEqual(rest, b"") self.assertEqual(oid, (1, 2, 840, 10045, 2, 1)) def test_with_extra_bytes(self): oid, rest = remove_object(self.oid_ecPublicKey + b"more") self.assertEqual(rest, b"more") self.assertEqual(oid, (1, 2, 840, 10045, 2, 1)) def test_with_large_second_subid(self): # from X.690, section 8.19.5 oid, rest = remove_object(b"\x06\x03\x88\x37\x03") self.assertEqual(rest, b"") self.assertEqual(oid, (2, 999, 3)) def test_with_padded_first_subid(self): with self.assertRaises(UnexpectedDER): remove_object(b"\x06\x02\x80\x00") def test_with_padded_second_subid(self): with self.assertRaises(UnexpectedDER): remove_object(b"\x06\x04\x88\x37\x80\x01") def test_with_missing_last_byte_of_multi_byte(self): with self.assertRaises(UnexpectedDER): remove_object(b"\x06\x03\x88\x37\x83") def test_with_two_subids(self): oid, rest = remove_object(b"\x06\x02\x88\x37") self.assertEqual(rest, b"") self.assertEqual(oid, (2, 999)) def test_zero_zero(self): oid, rest = remove_object(b"\x06\x01\x00") self.assertEqual(rest, b"") self.assertEqual(oid, (0, 0)) def test_empty_string(self): with self.assertRaises(UnexpectedDER): remove_object(b"") def test_missing_length(self): with self.assertRaises(UnexpectedDER): remove_object(b"\x06") def test_empty_oid(self): with self.assertRaises(UnexpectedDER): remove_object(b"\x06\x00") def test_empty_oid_overflow(self): with self.assertRaises(UnexpectedDER): remove_object(b"\x06\x01") def test_with_wrong_type(self): with self.assertRaises(UnexpectedDER): remove_object(b"\x04\x02\x88\x37") def test_with_too_long_length(self): with self.assertRaises(UnexpectedDER): remove_object(b"\x06\x03\x88\x37") class TestRemoveConstructed(unittest.TestCase): def test_simple(self): data = b"\xa1\x02\xff\xaa" tag, body, rest = remove_constructed(data) self.assertEqual(tag, 0x01) self.assertEqual(body, b"\xff\xaa") self.assertEqual(rest, b"") def test_with_malformed_tag(self): data = b"\x01\x02\xff\xaa" with self.assertRaises(UnexpectedDER) as e: remove_constructed(data) self.assertIn("constructed tag", str(e.exception)) class TestRemoveOctetString(unittest.TestCase): def test_simple(self): data = b"\x04\x03\xaa\xbb\xcc" body, rest = 
remove_octet_string(data) self.assertEqual(body, b"\xaa\xbb\xcc") self.assertEqual(rest, b"") def test_with_malformed_tag(self): data = b"\x03\x03\xaa\xbb\xcc" with self.assertRaises(UnexpectedDER) as e: remove_octet_string(data) self.assertIn("octetstring", str(e.exception)) class TestRemoveSequence(unittest.TestCase): def test_simple(self): data = b"\x30\x02\xff\xaa" body, rest = remove_sequence(data) self.assertEqual(body, b"\xff\xaa") self.assertEqual(rest, b"") def test_with_empty_string(self): with self.assertRaises(UnexpectedDER) as e: remove_sequence(b"") self.assertIn("Empty string", str(e.exception)) def test_with_wrong_tag(self): data = b"\x20\x02\xff\xaa" with self.assertRaises(UnexpectedDER) as e: remove_sequence(data) self.assertIn("wanted type 'sequence'", str(e.exception)) def test_with_wrong_length(self): data = b"\x30\x03\xff\xaa" with self.assertRaises(UnexpectedDER) as e: remove_sequence(data) self.assertIn("Length longer", str(e.exception)) @st.composite def st_oid(draw, max_value=2**512, max_size=50): """ Hypothesis strategy that returns valid OBJECT IDENTIFIERs as tuples :param max_value: maximum value of any single sub-identifier :param max_size: maximum length of the generated OID """ first = draw(st.integers(min_value=0, max_value=2)) if first < 2: second = draw(st.integers(min_value=0, max_value=39)) else: second = draw(st.integers(min_value=0, max_value=max_value)) rest = draw( st.lists( st.integers(min_value=0, max_value=max_value), max_size=max_size ) ) return (first, second) + tuple(rest) @given(st_oid()) def test_oids(ids): encoded_oid = encode_oid(*ids) decoded_oid, rest = remove_object(encoded_oid) assert rest == b"" assert decoded_oid == ids ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1648836739.0 ecdsa-0.18.0/src/ecdsa/test_ecdh.py0000664005075200507520000003560614221640203016437 0ustar00hkariohkarioimport os import shutil import subprocess import pytest from binascii import unhexlify try: import unittest2 as unittest except ImportError: import unittest from .curves import ( NIST192p, NIST224p, NIST256p, NIST384p, NIST521p, BRAINPOOLP160r1, ) from .curves import curves from .ecdh import ( ECDH, InvalidCurveError, InvalidSharedSecretError, NoKeyError, NoCurveError, ) from .keys import SigningKey, VerifyingKey from .ellipticcurve import CurveEdTw @pytest.mark.parametrize( "vcurve", curves, ids=[curve.name for curve in curves], ) def test_ecdh_each(vcurve): if isinstance(vcurve.curve, CurveEdTw): pytest.skip("ECDH is not supported for Edwards curves") ecdh1 = ECDH(curve=vcurve) ecdh2 = ECDH(curve=vcurve) ecdh2.generate_private_key() ecdh1.load_received_public_key(ecdh2.get_public_key()) ecdh2.load_received_public_key(ecdh1.generate_private_key()) secret1 = ecdh1.generate_sharedsecret_bytes() secret2 = ecdh2.generate_sharedsecret_bytes() assert secret1 == secret2 def test_ecdh_both_keys_present(): key1 = SigningKey.generate(BRAINPOOLP160r1) key2 = SigningKey.generate(BRAINPOOLP160r1) ecdh1 = ECDH(BRAINPOOLP160r1, key1, key2.verifying_key) ecdh2 = ECDH(private_key=key2, public_key=key1.verifying_key) secret1 = ecdh1.generate_sharedsecret_bytes() secret2 = ecdh2.generate_sharedsecret_bytes() assert secret1 == secret2 def test_ecdh_no_public_key(): ecdh1 = ECDH(curve=NIST192p) with pytest.raises(NoKeyError): ecdh1.generate_sharedsecret_bytes() ecdh1.generate_private_key() with pytest.raises(NoKeyError): ecdh1.generate_sharedsecret_bytes() class TestECDH(unittest.TestCase): def test_load_key_from_wrong_curve(self): ecdh1 = 
ECDH() ecdh1.set_curve(NIST192p) key1 = SigningKey.generate(BRAINPOOLP160r1) with self.assertRaises(InvalidCurveError) as e: ecdh1.load_private_key(key1) self.assertIn("Curve mismatch", str(e.exception)) def test_generate_without_curve(self): ecdh1 = ECDH() with self.assertRaises(NoCurveError) as e: ecdh1.generate_private_key() self.assertIn("Curve must be set", str(e.exception)) def test_load_bytes_without_curve_set(self): ecdh1 = ECDH() with self.assertRaises(NoCurveError) as e: ecdh1.load_private_key_bytes(b"\x01" * 32) self.assertIn("Curve must be set", str(e.exception)) def test_set_curve_from_received_public_key(self): ecdh1 = ECDH() key1 = SigningKey.generate(BRAINPOOLP160r1) ecdh1.load_received_public_key(key1.verifying_key) self.assertEqual(ecdh1.curve, BRAINPOOLP160r1) def test_ecdh_wrong_public_key_curve(): ecdh1 = ECDH(curve=NIST192p) ecdh1.generate_private_key() ecdh2 = ECDH(curve=NIST256p) ecdh2.generate_private_key() with pytest.raises(InvalidCurveError): ecdh1.load_received_public_key(ecdh2.get_public_key()) with pytest.raises(InvalidCurveError): ecdh2.load_received_public_key(ecdh1.get_public_key()) ecdh1.public_key = ecdh2.get_public_key() ecdh2.public_key = ecdh1.get_public_key() with pytest.raises(InvalidCurveError): ecdh1.generate_sharedsecret_bytes() with pytest.raises(InvalidCurveError): ecdh2.generate_sharedsecret_bytes() def test_ecdh_invalid_shared_secret_curve(): ecdh1 = ECDH(curve=NIST256p) ecdh1.generate_private_key() ecdh1.load_received_public_key( SigningKey.generate(NIST256p).get_verifying_key() ) ecdh1.private_key.privkey.secret_multiplier = ecdh1.private_key.curve.order with pytest.raises(InvalidSharedSecretError): ecdh1.generate_sharedsecret_bytes() # https://github.com/scogliani/ecc-test-vectors/blob/master/ecdh_kat/secp192r1.txt # https://github.com/scogliani/ecc-test-vectors/blob/master/ecdh_kat/secp256r1.txt # https://github.com/coruus/nist-testvectors/blob/master/csrc.nist.gov/groups/STM/cavp/documents/components/ecccdhtestvectors/KAS_ECC_CDH_PrimitiveTest.txt @pytest.mark.parametrize( "curve,privatekey,pubkey,secret", [ pytest.param( NIST192p, "f17d3fea367b74d340851ca4270dcb24c271f445bed9d527", "42ea6dd9969dd2a61fea1aac7f8e98edcc896c6e55857cc0" "dfbe5d7c61fac88b11811bde328e8a0d12bf01a9d204b523", "803d8ab2e5b6e6fca715737c3a82f7ce3c783124f6d51cd0", id="NIST192p-1", ), pytest.param( NIST192p, "56e853349d96fe4c442448dacb7cf92bb7a95dcf574a9bd5", "deb5712fa027ac8d2f22c455ccb73a91e17b6512b5e030e7" "7e2690a02cc9b28708431a29fb54b87b1f0c14e011ac2125", "c208847568b98835d7312cef1f97f7aa298283152313c29d", id="NIST192p-2", ), pytest.param( NIST192p, "c6ef61fe12e80bf56f2d3f7d0bb757394519906d55500949", "4edaa8efc5a0f40f843663ec5815e7762dddc008e663c20f" "0a9f8dc67a3e60ef6d64b522185d03df1fc0adfd42478279", "87229107047a3b611920d6e3b2c0c89bea4f49412260b8dd", id="NIST192p-3", ), pytest.param( NIST192p, "e6747b9c23ba7044f38ff7e62c35e4038920f5a0163d3cda", "8887c276edeed3e9e866b46d58d895c73fbd80b63e382e88" "04c5097ba6645e16206cfb70f7052655947dd44a17f1f9d5", "eec0bed8fc55e1feddc82158fd6dc0d48a4d796aaf47d46c", id="NIST192p-4", ), pytest.param( NIST192p, "beabedd0154a1afcfc85d52181c10f5eb47adc51f655047d", "0d045f30254adc1fcefa8a5b1f31bf4e739dd327cd18d594" "542c314e41427c08278a08ce8d7305f3b5b849c72d8aff73", "716e743b1b37a2cd8479f0a3d5a74c10ba2599be18d7e2f4", id="NIST192p-5", ), pytest.param( NIST192p, "cf70354226667321d6e2baf40999e2fd74c7a0f793fa8699", "fb35ca20d2e96665c51b98e8f6eb3d79113508d8bccd4516" "368eec0d5bfb847721df6aaff0e5d48c444f74bf9cd8a5a7", 
"f67053b934459985a315cb017bf0302891798d45d0e19508", id="NIST192p-6", ), pytest.param( NIST224p, "8346a60fc6f293ca5a0d2af68ba71d1dd389e5e40837942df3e43cbd", "af33cd0629bc7e996320a3f40368f74de8704fa37b8fab69abaae280" "882092ccbba7930f419a8a4f9bb16978bbc3838729992559a6f2e2d7", "7d96f9a3bd3c05cf5cc37feb8b9d5209d5c2597464dec3e9983743e8", id="NIST224p", ), pytest.param( NIST256p, "7d7dc5f71eb29ddaf80d6214632eeae03d9058af1fb6d22ed80badb62bc1a534", "700c48f77f56584c5cc632ca65640db91b6bacce3a4df6b42ce7cc838833d287" "db71e509e3fd9b060ddb20ba5c51dcc5948d46fbf640dfe0441782cab85fa4ac", "46fc62106420ff012e54a434fbdd2d25ccc5852060561e68040dd7778997bd7b", id="NIST256p-1", ), pytest.param( NIST256p, "38f65d6dce47676044d58ce5139582d568f64bb16098d179dbab07741dd5caf5", "809f04289c64348c01515eb03d5ce7ac1a8cb9498f5caa50197e58d43a86a7ae" "b29d84e811197f25eba8f5194092cb6ff440e26d4421011372461f579271cda3", "057d636096cb80b67a8c038c890e887d1adfa4195e9b3ce241c8a778c59cda67", id="NIST256p-2", ), pytest.param( NIST256p, "1accfaf1b97712b85a6f54b148985a1bdc4c9bec0bd258cad4b3d603f49f32c8", "a2339c12d4a03c33546de533268b4ad667debf458b464d77443636440ee7fec3" "ef48a3ab26e20220bcda2c1851076839dae88eae962869a497bf73cb66faf536", "2d457b78b4614132477618a5b077965ec90730a8c81a1c75d6d4ec68005d67ec", id="NIST256p-3", ), pytest.param( NIST256p, "207c43a79bfee03db6f4b944f53d2fb76cc49ef1c9c4d34d51b6c65c4db6932d", "df3989b9fa55495719b3cf46dccd28b5153f7808191dd518eff0c3cff2b705ed" "422294ff46003429d739a33206c8752552c8ba54a270defc06e221e0feaf6ac4", "96441259534b80f6aee3d287a6bb17b5094dd4277d9e294f8fe73e48bf2a0024", id="NIST256p-4", ), pytest.param( NIST256p, "59137e38152350b195c9718d39673d519838055ad908dd4757152fd8255c09bf", "41192d2813e79561e6a1d6f53c8bc1a433a199c835e141b05a74a97b0faeb922" "1af98cc45e98a7e041b01cf35f462b7562281351c8ebf3ffa02e33a0722a1328", "19d44c8d63e8e8dd12c22a87b8cd4ece27acdde04dbf47f7f27537a6999a8e62", id="NIST256p-5", ), pytest.param( NIST256p, "f5f8e0174610a661277979b58ce5c90fee6c9b3bb346a90a7196255e40b132ef", "33e82092a0f1fb38f5649d5867fba28b503172b7035574bf8e5b7100a3052792" "f2cf6b601e0a05945e335550bf648d782f46186c772c0f20d3cd0d6b8ca14b2f", "664e45d5bba4ac931cd65d52017e4be9b19a515f669bea4703542a2c525cd3d3", id="NIST256p-6", ), pytest.param( NIST384p, "3cc3122a68f0d95027ad38c067916ba0eb8c38894d22e1b1" "5618b6818a661774ad463b205da88cf699ab4d43c9cf98a1", "a7c76b970c3b5fe8b05d2838ae04ab47697b9eaf52e76459" "2efda27fe7513272734466b400091adbf2d68c58e0c50066" "ac68f19f2e1cb879aed43a9969b91a0839c4c38a49749b66" "1efedf243451915ed0905a32b060992b468c64766fc8437a", "5f9d29dc5e31a163060356213669c8ce132e22f57c9a04f4" "0ba7fcead493b457e5621e766c40a2e3d4d6a04b25e533f1", id="NIST384p", ), pytest.param( NIST521p, "017eecc07ab4b329068fba65e56a1f8890aa935e57134ae0ffcce802735151f4ea" "c6564f6ee9974c5e6887a1fefee5743ae2241bfeb95d5ce31ddcb6f9edb4d6fc47", "00685a48e86c79f0f0875f7bc18d25eb5fc8c0b07e5da4f4370f3a949034085433" "4b1e1b87fa395464c60626124a4e70d0f785601d37c09870ebf176666877a2046d" "01ba52c56fc8776d9e8f5db4f0cc27636d0b741bbe05400697942e80b739884a83" "bde99e0f6716939e632bc8986fa18dccd443a348b6c3e522497955a4f3c302f676", "005fc70477c3e63bc3954bd0df3ea0d1f41ee21746ed95fc5e1fdf90930d5e1366" "72d72cc770742d1711c3c3a4c334a0ad9759436a4d3c5bf6e74b9578fac148c831", id="NIST521p", ), ], ) def test_ecdh_NIST(curve, privatekey, pubkey, secret): ecdh = ECDH(curve=curve) ecdh.load_private_key_bytes(unhexlify(privatekey)) ecdh.load_received_public_key_bytes(unhexlify(pubkey)) sharedsecret = ecdh.generate_sharedsecret_bytes() assert sharedsecret 
== unhexlify(secret) pem_local_private_key = ( "-----BEGIN EC PRIVATE KEY-----\n" "MF8CAQEEGF7IQgvW75JSqULpiQQ8op9WH6Uldw6xxaAKBggqhkjOPQMBAaE0AzIA\n" "BLiBd9CE7xf15FY5QIAoNg+fWbSk1yZOYtoGUdzkejWkxbRc9RWTQjqLVXucIJnz\n" "bA==\n" "-----END EC PRIVATE KEY-----\n" ) der_local_private_key = ( "305f02010104185ec8420bd6ef9252a942e989043ca29f561fa525770eb1c5a00a06082a864" "8ce3d030101a13403320004b88177d084ef17f5e45639408028360f9f59b4a4d7264e62da06" "51dce47a35a4c5b45cf51593423a8b557b9c2099f36c" ) pem_remote_public_key = ( "-----BEGIN PUBLIC KEY-----\n" "MEkwEwYHKoZIzj0CAQYIKoZIzj0DAQEDMgAEuIF30ITvF/XkVjlAgCg2D59ZtKTX\n" "Jk5i2gZR3OR6NaTFtFz1FZNCOotVe5wgmfNs\n" "-----END PUBLIC KEY-----\n" ) der_remote_public_key = ( "3049301306072a8648ce3d020106082a8648ce3d03010103320004b88177d084ef17f5e4563" "9408028360f9f59b4a4d7264e62da0651dce47a35a4c5b45cf51593423a8b557b9c2099f36c" ) gshared_secret = "8f457e34982478d1c34b9cd2d0c15911b72dd60d869e2cea" def test_ecdh_pem(): ecdh = ECDH() ecdh.load_private_key_pem(pem_local_private_key) ecdh.load_received_public_key_pem(pem_remote_public_key) sharedsecret = ecdh.generate_sharedsecret_bytes() assert sharedsecret == unhexlify(gshared_secret) def test_ecdh_der(): ecdh = ECDH() ecdh.load_private_key_der(unhexlify(der_local_private_key)) ecdh.load_received_public_key_der(unhexlify(der_remote_public_key)) sharedsecret = ecdh.generate_sharedsecret_bytes() assert sharedsecret == unhexlify(gshared_secret) # Exception classes used by run_openssl. class RunOpenSslError(Exception): pass def run_openssl(cmd): OPENSSL = "openssl" p = subprocess.Popen( [OPENSSL] + cmd.split(), stdout=subprocess.PIPE, stderr=subprocess.STDOUT, ) stdout, ignored = p.communicate() if p.returncode != 0: raise RunOpenSslError( "cmd '%s %s' failed: rc=%s, stdout/err was %s" % (OPENSSL, cmd, p.returncode, stdout) ) return stdout.decode() OPENSSL_SUPPORTED_CURVES = set( c.split(":")[0].strip() for c in run_openssl("ecparam -list_curves").split("\n") ) @pytest.mark.parametrize( "vcurve", curves, ids=[curve.name for curve in curves], ) def test_ecdh_with_openssl(vcurve): if isinstance(vcurve.curve, CurveEdTw): pytest.skip("Edwards curves are not supported for ECDH") assert vcurve.openssl_name if vcurve.openssl_name not in OPENSSL_SUPPORTED_CURVES: pytest.skip("system openssl does not support " + vcurve.openssl_name) try: hlp = run_openssl("pkeyutl -help") if hlp.find("-derive") == 0: # pragma: no cover pytest.skip("system openssl does not support `pkeyutl -derive`") except RunOpenSslError: # pragma: no cover pytest.skip("system openssl could not be executed") if os.path.isdir("t"): # pragma: no branch shutil.rmtree("t") os.mkdir("t") run_openssl( "ecparam -name %s -genkey -out t/privkey1.pem" % vcurve.openssl_name ) run_openssl( "ecparam -name %s -genkey -out t/privkey2.pem" % vcurve.openssl_name ) run_openssl("ec -in t/privkey1.pem -pubout -out t/pubkey1.pem") ecdh1 = ECDH(curve=vcurve) ecdh2 = ECDH(curve=vcurve) with open("t/privkey1.pem") as e: key = e.read() ecdh1.load_private_key_pem(key) with open("t/privkey2.pem") as e: key = e.read() ecdh2.load_private_key_pem(key) with open("t/pubkey1.pem") as e: key = e.read() vk1 = VerifyingKey.from_pem(key) assert vk1.to_string() == ecdh1.get_public_key().to_string() vk2 = ecdh2.get_public_key() with open("t/pubkey2.pem", "wb") as e: e.write(vk2.to_pem()) ecdh1.load_received_public_key(vk2) ecdh2.load_received_public_key(vk1) secret1 = ecdh1.generate_sharedsecret_bytes() secret2 = ecdh2.generate_sharedsecret_bytes() assert secret1 == secret2 run_openssl( 
"pkeyutl -derive -inkey t/privkey1.pem -peerkey t/pubkey2.pem -out t/secret1" ) run_openssl( "pkeyutl -derive -inkey t/privkey2.pem -peerkey t/pubkey1.pem -out t/secret2" ) with open("t/secret1", "rb") as e: ssl_secret1 = e.read() with open("t/secret1", "rb") as e: ssl_secret2 = e.read() assert len(ssl_secret1) == vk1.curve.verifying_key_length // 2 assert len(secret1) == vk1.curve.verifying_key_length // 2 assert ssl_secret1 == ssl_secret2 assert secret1 == ssl_secret1 ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1611246732.0 ecdsa-0.18.0/src/ecdsa/test_ecdsa.py0000664005075200507520000005656314002326214016620 0ustar00hkariohkariofrom __future__ import print_function import sys import hypothesis.strategies as st from hypothesis import given, settings, note, example try: import unittest2 as unittest except ImportError: import unittest import pytest from .ecdsa import ( Private_key, Public_key, Signature, generator_192, digest_integer, ellipticcurve, point_is_valid, generator_224, generator_256, generator_384, generator_521, generator_secp256k1, curve_192, InvalidPointError, curve_112r2, generator_112r2, int_to_string, ) HYP_SETTINGS = {} # old hypothesis doesn't have the "deadline" setting if sys.version_info > (2, 7): # pragma: no branch # SEC521p is slow, allow long execution for it HYP_SETTINGS["deadline"] = 5000 class TestP192FromX9_62(unittest.TestCase): """Check test vectors from X9.62""" @classmethod def setUpClass(cls): cls.d = 651056770906015076056810763456358567190100156695615665659 cls.Q = cls.d * generator_192 cls.k = 6140507067065001063065065565667405560006161556565665656654 cls.R = cls.k * generator_192 cls.msg = 968236873715988614170569073515315707566766479517 cls.pubk = Public_key(generator_192, generator_192 * cls.d) cls.privk = Private_key(cls.pubk, cls.d) cls.sig = cls.privk.sign(cls.msg, cls.k) def test_point_multiplication(self): assert self.Q.x() == 0x62B12D60690CDCF330BABAB6E69763B471F994DD702D16A5 def test_point_multiplication_2(self): assert self.R.x() == 0x885052380FF147B734C330C43D39B2C4A89F29B0F749FEAD assert self.R.y() == 0x9CF9FA1CBEFEFB917747A3BB29C072B9289C2547884FD835 def test_mult_and_addition(self): u1 = 2563697409189434185194736134579731015366492496392189760599 u2 = 6266643813348617967186477710235785849136406323338782220568 temp = u1 * generator_192 + u2 * self.Q assert temp.x() == 0x885052380FF147B734C330C43D39B2C4A89F29B0F749FEAD assert temp.y() == 0x9CF9FA1CBEFEFB917747A3BB29C072B9289C2547884FD835 def test_signature(self): r, s = self.sig.r, self.sig.s assert r == 3342403536405981729393488334694600415596881826869351677613 assert s == 5735822328888155254683894997897571951568553642892029982342 def test_verification(self): assert self.pubk.verifies(self.msg, self.sig) def test_rejection(self): assert not self.pubk.verifies(self.msg - 1, self.sig) class TestPublicKey(unittest.TestCase): def test_equality_public_keys(self): gen = generator_192 x = 0xC58D61F88D905293BCD4CD0080BCB1B7F811F2FFA41979F6 y = 0x8804DC7A7C4C7F8B5D437F5156F3312CA7D6DE8A0E11867F point = ellipticcurve.Point(gen.curve(), x, y) pub_key1 = Public_key(gen, point) pub_key2 = Public_key(gen, point) self.assertEqual(pub_key1, pub_key2) def test_inequality_public_key(self): gen = generator_192 x1 = 0xC58D61F88D905293BCD4CD0080BCB1B7F811F2FFA41979F6 y1 = 0x8804DC7A7C4C7F8B5D437F5156F3312CA7D6DE8A0E11867F point1 = ellipticcurve.Point(gen.curve(), x1, y1) x2 = 0x6A223D00BD22C52833409A163E057E5B5DA1DEF2A197DD15 y2 = 
0x7B482604199367F1F303F9EF627F922F97023E90EAE08ABF point2 = ellipticcurve.Point(gen.curve(), x2, y2) pub_key1 = Public_key(gen, point1) pub_key2 = Public_key(gen, point2) self.assertNotEqual(pub_key1, pub_key2) def test_inequality_different_curves(self): gen = generator_192 x1 = 0xC58D61F88D905293BCD4CD0080BCB1B7F811F2FFA41979F6 y1 = 0x8804DC7A7C4C7F8B5D437F5156F3312CA7D6DE8A0E11867F point1 = ellipticcurve.Point(gen.curve(), x1, y1) x2 = 0x722BA0FB6B8FC8898A4C6AB49E66 y2 = 0x2B7344BB57A7ABC8CA0F1A398C7D point2 = ellipticcurve.Point(generator_112r2.curve(), x2, y2) pub_key1 = Public_key(gen, point1) pub_key2 = Public_key(generator_112r2, point2) self.assertNotEqual(pub_key1, pub_key2) def test_inequality_public_key_not_implemented(self): gen = generator_192 x = 0xC58D61F88D905293BCD4CD0080BCB1B7F811F2FFA41979F6 y = 0x8804DC7A7C4C7F8B5D437F5156F3312CA7D6DE8A0E11867F point = ellipticcurve.Point(gen.curve(), x, y) pub_key = Public_key(gen, point) self.assertNotEqual(pub_key, None) def test_public_key_with_generator_without_order(self): gen = ellipticcurve.PointJacobi( generator_192.curve(), generator_192.x(), generator_192.y(), 1 ) x = 0xC58D61F88D905293BCD4CD0080BCB1B7F811F2FFA41979F6 y = 0x8804DC7A7C4C7F8B5D437F5156F3312CA7D6DE8A0E11867F point = ellipticcurve.Point(gen.curve(), x, y) with self.assertRaises(InvalidPointError) as e: Public_key(gen, point) self.assertIn("Generator point must have order", str(e.exception)) def test_public_point_on_curve_not_scalar_multiple_of_base_point(self): x = 2 y = 0xBE6AA4938EF7CFE6FE29595B6B00 # we need a curve with cofactor != 1 point = ellipticcurve.PointJacobi(curve_112r2, x, y, 1) self.assertTrue(curve_112r2.contains_point(x, y)) with self.assertRaises(InvalidPointError) as e: Public_key(generator_112r2, point) self.assertIn("Generator point order", str(e.exception)) def test_point_is_valid_with_not_scalar_multiple_of_base_point(self): x = 2 y = 0xBE6AA4938EF7CFE6FE29595B6B00 self.assertFalse(point_is_valid(generator_112r2, x, y)) # the tests to verify the extensiveness of tests in ecdsa.ecdsa # if PointJacobi gets modified to calculate the x and y mod p the tests # below will need to use a fake/mock object def test_invalid_point_x_negative(self): pt = ellipticcurve.PointJacobi(curve_192, -1, 0, 1) with self.assertRaises(InvalidPointError) as e: Public_key(generator_192, pt) self.assertIn("The public point has x or y", str(e.exception)) def test_invalid_point_x_equal_p(self): pt = ellipticcurve.PointJacobi(curve_192, curve_192.p(), 0, 1) with self.assertRaises(InvalidPointError) as e: Public_key(generator_192, pt) self.assertIn("The public point has x or y", str(e.exception)) def test_invalid_point_y_negative(self): pt = ellipticcurve.PointJacobi(curve_192, 0, -1, 1) with self.assertRaises(InvalidPointError) as e: Public_key(generator_192, pt) self.assertIn("The public point has x or y", str(e.exception)) def test_invalid_point_y_equal_p(self): pt = ellipticcurve.PointJacobi(curve_192, 0, curve_192.p(), 1) with self.assertRaises(InvalidPointError) as e: Public_key(generator_192, pt) self.assertIn("The public point has x or y", str(e.exception)) class TestPublicKeyVerifies(unittest.TestCase): # test all the different ways that a signature can be publicly invalid @classmethod def setUpClass(cls): gen = generator_192 x = 0xC58D61F88D905293BCD4CD0080BCB1B7F811F2FFA41979F6 y = 0x8804DC7A7C4C7F8B5D437F5156F3312CA7D6DE8A0E11867F point = ellipticcurve.Point(gen.curve(), x, y) cls.pub_key = Public_key(gen, point) def test_sig_with_r_zero(self): sig = 
Signature(0, 1) self.assertFalse(self.pub_key.verifies(1, sig)) def test_sig_with_r_order(self): sig = Signature(generator_192.order(), 1) self.assertFalse(self.pub_key.verifies(1, sig)) def test_sig_with_s_zero(self): sig = Signature(1, 0) self.assertFalse(self.pub_key.verifies(1, sig)) def test_sig_with_s_order(self): sig = Signature(1, generator_192.order()) self.assertFalse(self.pub_key.verifies(1, sig)) class TestPrivateKey(unittest.TestCase): @classmethod def setUpClass(cls): gen = generator_192 x = 0xC58D61F88D905293BCD4CD0080BCB1B7F811F2FFA41979F6 y = 0x8804DC7A7C4C7F8B5D437F5156F3312CA7D6DE8A0E11867F point = ellipticcurve.Point(gen.curve(), x, y) cls.pub_key = Public_key(gen, point) def test_equality_private_keys(self): pr_key1 = Private_key(self.pub_key, 100) pr_key2 = Private_key(self.pub_key, 100) self.assertEqual(pr_key1, pr_key2) def test_inequality_private_keys(self): pr_key1 = Private_key(self.pub_key, 100) pr_key2 = Private_key(self.pub_key, 200) self.assertNotEqual(pr_key1, pr_key2) def test_inequality_private_keys_not_implemented(self): pr_key = Private_key(self.pub_key, 100) self.assertNotEqual(pr_key, None) # Testing point validity, as per ECDSAVS.pdf B.2.2: P192_POINTS = [ ( generator_192, 0xCD6D0F029A023E9AACA429615B8F577ABEE685D8257CC83A, 0x00019C410987680E9FB6C0B6ECC01D9A2647C8BAE27721BACDFC, False, ), ( generator_192, 0x00017F2FCE203639E9EAF9FB50B81FC32776B30E3B02AF16C73B, 0x95DA95C5E72DD48E229D4748D4EEE658A9A54111B23B2ADB, False, ), ( generator_192, 0x4F77F8BC7FCCBADD5760F4938746D5F253EE2168C1CF2792, 0x000147156FF824D131629739817EDB197717C41AAB5C2A70F0F6, False, ), ( generator_192, 0xC58D61F88D905293BCD4CD0080BCB1B7F811F2FFA41979F6, 0x8804DC7A7C4C7F8B5D437F5156F3312CA7D6DE8A0E11867F, True, ), ( generator_192, 0xCDF56C1AA3D8AFC53C521ADF3FFB96734A6A630A4A5B5A70, 0x97C1C44A5FB229007B5EC5D25F7413D170068FFD023CAA4E, True, ), ( generator_192, 0x89009C0DC361C81E99280C8E91DF578DF88CDF4B0CDEDCED, 0x27BE44A529B7513E727251F128B34262A0FD4D8EC82377B9, True, ), ( generator_192, 0x6A223D00BD22C52833409A163E057E5B5DA1DEF2A197DD15, 0x7B482604199367F1F303F9EF627F922F97023E90EAE08ABF, True, ), ( generator_192, 0x6DCCBDE75C0948C98DAB32EA0BC59FE125CF0FB1A3798EDA, 0x0001171A3E0FA60CF3096F4E116B556198DE430E1FBD330C8835, False, ), ( generator_192, 0xD266B39E1F491FC4ACBBBC7D098430931CFA66D55015AF12, 0x193782EB909E391A3148B7764E6B234AA94E48D30A16DBB2, False, ), ( generator_192, 0x9D6DDBCD439BAA0C6B80A654091680E462A7D1D3F1FFEB43, 0x6AD8EFC4D133CCF167C44EB4691C80ABFFB9F82B932B8CAA, False, ), ( generator_192, 0x146479D944E6BDA87E5B35818AA666A4C998A71F4E95EDBC, 0xA86D6FE62BC8FBD88139693F842635F687F132255858E7F6, False, ), ( generator_192, 0xE594D4A598046F3598243F50FD2C7BD7D380EDB055802253, 0x509014C0C4D6B536E3CA750EC09066AF39B4C8616A53A923, False, ), ] @pytest.mark.parametrize("generator,x,y,expected", P192_POINTS) def test_point_validity(generator, x, y, expected): """ `generator` defines the curve; is `(x, y)` a point on this curve? `expected` is True if the right answer is Yes. 
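    The invalid entries are either coordinates that are larger than the
    underlying field allows or points that do not satisfy the curve
    equation.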
""" assert point_is_valid(generator, x, y) == expected # Trying signature-verification tests from ECDSAVS.pdf B.2.4: CURVE_192_KATS = [ ( generator_192, int( "0x84ce72aa8699df436059f052ac51b6398d2511e49631bcb7e71f89c499b9ee" "425dfbc13a5f6d408471b054f2655617cbbaf7937b7c80cd8865cf02c8487d30" "d2b0fbd8b2c4e102e16d828374bbc47b93852f212d5043c3ea720f086178ff79" "8cc4f63f787b9c2e419efa033e7644ea7936f54462dc21a6c4580725f7f0e7d1" "58", 16, ), 0xD9DBFB332AA8E5FF091E8CE535857C37C73F6250FFB2E7AC, 0x282102E364FEDED3AD15DDF968F88D8321AA268DD483EBC4, 0x64DCA58A20787C488D11D6DD96313F1B766F2D8EFE122916, 0x1ECBA28141E84AB4ECAD92F56720E2CC83EB3D22DEC72479, True, ), ( generator_192, int( "0x94bb5bacd5f8ea765810024db87f4224ad71362a3c28284b2b9f39fab86db1" "2e8beb94aae899768229be8fdb6c4f12f28912bb604703a79ccff769c1607f5a" "91450f30ba0460d359d9126cbd6296be6d9c4bb96c0ee74cbb44197c207f6db3" "26ab6f5a659113a9034e54be7b041ced9dcf6458d7fb9cbfb2744d999f7dfd63" "f4", 16, ), 0x3E53EF8D3112AF3285C0E74842090712CD324832D4277AE7, 0xCC75F8952D30AEC2CBB719FC6AA9934590B5D0FF5A83ADB7, 0x8285261607283BA18F335026130BAB31840DCFD9C3E555AF, 0x356D89E1B04541AFC9704A45E9C535CE4A50929E33D7E06C, True, ), ( generator_192, int( "0xf6227a8eeb34afed1621dcc89a91d72ea212cb2f476839d9b4243c66877911" "b37b4ad6f4448792a7bbba76c63bdd63414b6facab7dc71c3396a73bd7ee14cd" "d41a659c61c99b779cecf07bc51ab391aa3252386242b9853ea7da67fd768d30" "3f1b9b513d401565b6f1eb722dfdb96b519fe4f9bd5de67ae131e64b40e78c42" "dd", 16, ), 0x16335DBE95F8E8254A4E04575D736BEFB258B8657F773CB7, 0x421B13379C59BC9DCE38A1099CA79BBD06D647C7F6242336, 0x4141BD5D64EA36C5B0BD21EF28C02DA216ED9D04522B1E91, 0x159A6AA852BCC579E821B7BB0994C0861FB08280C38DAA09, False, ), ( generator_192, int( "0x16b5f93afd0d02246f662761ed8e0dd9504681ed02a253006eb36736b56309" "7ba39f81c8e1bce7a16c1339e345efabbc6baa3efb0612948ae51103382a8ee8" "bc448e3ef71e9f6f7a9676694831d7f5dd0db5446f179bcb737d4a526367a447" "bfe2c857521c7f40b6d7d7e01a180d92431fb0bbd29c04a0c420a57b3ed26ccd" "8a", 16, ), 0xFD14CDF1607F5EFB7B1793037B15BDF4BAA6F7C16341AB0B, 0x83FA0795CC6C4795B9016DAC928FD6BAC32F3229A96312C4, 0x8DFDB832951E0167C5D762A473C0416C5C15BC1195667DC1, 0x1720288A2DC13FA1EC78F763F8FE2FF7354A7E6FDDE44520, False, ), ( generator_192, int( "0x08a2024b61b79d260e3bb43ef15659aec89e5b560199bc82cf7c65c77d3919" "2e03b9a895d766655105edd9188242b91fbde4167f7862d4ddd61e5d4ab55196" "683d4f13ceb90d87aea6e07eb50a874e33086c4a7cb0273a8e1c4408f4b846bc" "eae1ebaac1b2b2ea851a9b09de322efe34cebe601653efd6ddc876ce8c2f2072" "fb", 16, ), 0x674F941DC1A1F8B763C9334D726172D527B90CA324DB8828, 0x65ADFA32E8B236CB33A3E84CF59BFB9417AE7E8EDE57A7FF, 0x9508B9FDD7DAF0D8126F9E2BC5A35E4C6D800B5B804D7796, 0x36F2BF6B21B987C77B53BB801B3435A577E3D493744BFAB0, False, ), ( generator_192, int( "0x1843aba74b0789d4ac6b0b8923848023a644a7b70afa23b1191829bbe4397c" "e15b629bf21a8838298653ed0c19222b95fa4f7390d1b4c844d96e645537e0aa" "e98afb5c0ac3bd0e4c37f8daaff25556c64e98c319c52687c904c4de7240a1cc" "55cd9756b7edaef184e6e23b385726e9ffcba8001b8f574987c1a3fedaaa83ca" "6d", 16, ), 0x10ECCA1AAD7220B56A62008B35170BFD5E35885C4014A19F, 0x04EB61984C6C12ADE3BC47F3C629ECE7AA0A033B9948D686, 0x82BFA4E82C0DFE9274169B86694E76CE993FD83B5C60F325, 0xA97685676C59A65DBDE002FE9D613431FB183E8006D05633, False, ), ( generator_192, int( "0x5a478f4084ddd1a7fea038aa9732a822106385797d02311aeef4d0264f824f" "698df7a48cfb6b578cf3da416bc0799425bb491be5b5ecc37995b85b03420a98" "f2c4dc5c31a69a379e9e322fbe706bbcaf0f77175e05cbb4fa162e0da82010a2" "78461e3e974d137bc746d1880d6eb02aa95216014b37480d84b87f717bb13f76" 
"e1", 16, ), 0x6636653CB5B894CA65C448277B29DA3AD101C4C2300F7C04, 0xFDF1CBB3FC3FD6A4F890B59E554544175FA77DBDBEB656C1, 0xEAC2DDECDDFB79931A9C3D49C08DE0645C783A24CB365E1C, 0x3549FEE3CFA7E5F93BC47D92D8BA100E881A2A93C22F8D50, False, ), ( generator_192, int( "0xc598774259a058fa65212ac57eaa4f52240e629ef4c310722088292d1d4af6" "c39b49ce06ba77e4247b20637174d0bd67c9723feb57b5ead232b47ea452d5d7" "a089f17c00b8b6767e434a5e16c231ba0efa718a340bf41d67ea2d295812ff1b" "9277daacb8bc27b50ea5e6443bcf95ef4e9f5468fe78485236313d53d1c68f6b" "a2", 16, ), 0xA82BD718D01D354001148CD5F69B9EBF38FF6F21898F8AAA, 0xE67CEEDE07FC2EBFAFD62462A51E4B6C6B3D5B537B7CAF3E, 0x4D292486C620C3DE20856E57D3BB72FCDE4A73AD26376955, 0xA85289591A6081D5728825520E62FF1C64F94235C04C7F95, False, ), ( generator_192, int( "0xca98ed9db081a07b7557f24ced6c7b9891269a95d2026747add9e9eb80638a" "961cf9c71a1b9f2c29744180bd4c3d3db60f2243c5c0b7cc8a8d40a3f9a7fc91" "0250f2187136ee6413ffc67f1a25e1c4c204fa9635312252ac0e0481d89b6d53" "808f0c496ba87631803f6c572c1f61fa049737fdacce4adff757afed4f05beb6" "58", 16, ), 0x7D3B016B57758B160C4FCA73D48DF07AE3B6B30225126C2F, 0x4AF3790D9775742BDE46F8DA876711BE1B65244B2B39E7EC, 0x95F778F5F656511A5AB49A5D69DDD0929563C29CBC3A9E62, 0x75C87FC358C251B4C83D2DD979FAAD496B539F9F2EE7A289, False, ), ( generator_192, int( "0x31dd9a54c8338bea06b87eca813d555ad1850fac9742ef0bbe40dad400e102" "88acc9c11ea7dac79eb16378ebea9490e09536099f1b993e2653cd50240014c9" "0a9c987f64545abc6a536b9bd2435eb5e911fdfde2f13be96ea36ad38df4ae9e" "a387b29cced599af777338af2794820c9cce43b51d2112380a35802ab7e396c9" "7a", 16, ), 0x9362F28C4EF96453D8A2F849F21E881CD7566887DA8BEB4A, 0xE64D26D8D74C48A024AE85D982EE74CD16046F4EE5333905, 0xF3923476A296C88287E8DE914B0B324AD5A963319A4FE73B, 0xF0BAEED7624ED00D15244D8BA2AEDE085517DBDEC8AC65F5, True, ), ( generator_192, int( "0xb2b94e4432267c92f9fdb9dc6040c95ffa477652761290d3c7de312283f645" "0d89cc4aabe748554dfb6056b2d8e99c7aeaad9cdddebdee9dbc099839562d90" "64e68e7bb5f3a6bba0749ca9a538181fc785553a4000785d73cc207922f63e8c" "e1112768cb1de7b673aed83a1e4a74592f1268d8e2a4e9e63d414b5d442bd045" "6d", 16, ), 0xCC6FC032A846AAAC25533EB033522824F94E670FA997ECEF, 0xE25463EF77A029ECCDA8B294FD63DD694E38D223D30862F1, 0x066B1D07F3A40E679B620EDA7F550842A35C18B80C5EBE06, 0xA0B0FB201E8F2DF65E2C4508EF303BDC90D934016F16B2DC, False, ), ( generator_192, int( "0x4366fcadf10d30d086911de30143da6f579527036937007b337f7282460eae" "5678b15cccda853193ea5fc4bc0a6b9d7a31128f27e1214988592827520b214e" "ed5052f7775b750b0c6b15f145453ba3fee24a085d65287e10509eb5d5f602c4" "40341376b95c24e5c4727d4b859bfe1483d20538acdd92c7997fa9c614f0f839" "d7", 16, ), 0x955C908FE900A996F7E2089BEE2F6376830F76A19135E753, 0xBA0C42A91D3847DE4A592A46DC3FDAF45A7CC709B90DE520, 0x1F58AD77FC04C782815A1405B0925E72095D906CBF52A668, 0xF2E93758B3AF75EDF784F05A6761C9B9A6043C66B845B599, False, ), ( generator_192, int( "0x543f8af57d750e33aa8565e0cae92bfa7a1ff78833093421c2942cadf99866" "70a5ff3244c02a8225e790fbf30ea84c74720abf99cfd10d02d34377c3d3b412" "69bea763384f372bb786b5846f58932defa68023136cd571863b304886e95e52" "e7877f445b9364b3f06f3c28da12707673fecb4b8071de06b6e0a3c87da160ce" "f3", 16, ), 0x31F7FA05576D78A949B24812D4383107A9A45BB5FCCDD835, 0x8DC0EB65994A90F02B5E19BD18B32D61150746C09107E76B, 0xBE26D59E4E883DDE7C286614A767B31E49AD88789D3A78FF, 0x8762CA831C1CE42DF77893C9B03119428E7A9B819B619068, False, ), ( generator_192, int( "0xd2e8454143ce281e609a9d748014dcebb9d0bc53adb02443a6aac2ffe6cb009f" "387c346ecb051791404f79e902ee333ad65e5c8cb38dc0d1d39a8dc90add502357" 
"2720e5b94b190d43dd0d7873397504c0c7aef2727e628eb6a74411f2e400c65670" "716cb4a815dc91cbbfeb7cfe8c929e93184c938af2c078584da045e8f8d1", 16, ), 0x66AA8EDBBDB5CF8E28CEB51B5BDA891CAE2DF84819FE25C0, 0x0C6BC2F69030A7CE58D4A00E3B3349844784A13B8936F8DA, 0xA4661E69B1734F4A71B788410A464B71E7FFE42334484F23, 0x738421CF5E049159D69C57A915143E226CAC8355E149AFE9, False, ), ( generator_192, int( "0x6660717144040f3e2f95a4e25b08a7079c702a8b29babad5a19a87654bc5c5af" "a261512a11b998a4fb36b5d8fe8bd942792ff0324b108120de86d63f65855e5461" "184fc96a0a8ffd2ce6d5dfb0230cbbdd98f8543e361b3205f5da3d500fdc8bac6d" "b377d75ebef3cb8f4d1ff738071ad0938917889250b41dd1d98896ca06fb", 16, ), 0xBCFACF45139B6F5F690A4C35A5FFFA498794136A2353FC77, 0x6F4A6C906316A6AFC6D98FE1F0399D056F128FE0270B0F22, 0x9DB679A3DAFE48F7CCAD122933ACFE9DA0970B71C94C21C1, 0x984C2DB99827576C0A41A5DA41E07D8CC768BC82F18C9DA9, False, ), ] @pytest.mark.parametrize("gen,msg,qx,qy,r,s,expected", CURVE_192_KATS) def test_signature_validity(gen, msg, qx, qy, r, s, expected): """ `msg` = message, `qx` and `qy` represent the base point on elliptic curve of `gen`, `r` and `s` are the signature, and `expected` is True iff the signature is expected to be valid.""" pubk = Public_key(gen, ellipticcurve.Point(gen.curve(), qx, qy)) assert expected == pubk.verifies(digest_integer(msg), Signature(r, s)) @pytest.mark.parametrize( "gen,msg,qx,qy,r,s,expected", [x for x in CURVE_192_KATS if x[6]] ) def test_pk_recovery(gen, msg, r, s, qx, qy, expected): del expected sign = Signature(r, s) pks = sign.recover_public_keys(digest_integer(msg), gen) assert pks # Test if the signature is valid for all found public keys for pk in pks: q = pk.point test_signature_validity(gen, msg, q.x(), q.y(), r, s, True) # Test if the original public key is in the set of found keys original_q = ellipticcurve.Point(gen.curve(), qx, qy) points = [pk.point for pk in pks] assert original_q in points @st.composite def st_random_gen_key_msg_nonce(draw): """Hypothesis strategy for test_sig_verify().""" name_gen = { "generator_192": generator_192, "generator_224": generator_224, "generator_256": generator_256, "generator_secp256k1": generator_secp256k1, "generator_384": generator_384, "generator_521": generator_521, } name = draw(st.sampled_from(sorted(name_gen.keys()))) note("Generator used: {0}".format(name)) generator = name_gen[name] order = int(generator.order()) key = draw(st.integers(min_value=1, max_value=order)) msg = draw(st.integers(min_value=1, max_value=order)) nonce = draw( st.integers(min_value=1, max_value=order + 1) | st.integers(min_value=order >> 1, max_value=order) ) return generator, key, msg, nonce SIG_VER_SETTINGS = dict(HYP_SETTINGS) SIG_VER_SETTINGS["max_examples"] = 10 @settings(**SIG_VER_SETTINGS) @example((generator_224, 4, 1, 1)) @given(st_random_gen_key_msg_nonce()) def test_sig_verify(args): """ Check if signing and verification works for arbitrary messages and that signatures for other messages are rejected. 
""" generator, sec_mult, msg, nonce = args pubkey = Public_key(generator, generator * sec_mult) privkey = Private_key(pubkey, sec_mult) signature = privkey.sign(msg, nonce) assert pubkey.verifies(msg, signature) assert not pubkey.verifies(msg - 1, signature) def test_int_to_string_with_zero(): assert int_to_string(0) == b"\x00" ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1648836739.0 ecdsa-0.18.0/src/ecdsa/test_eddsa.py0000664005075200507520000007754714221640203016626 0ustar00hkariohkarioimport pickle import hashlib import pytest try: import unittest2 as unittest except ImportError: import unittest from hypothesis import given, settings, example import hypothesis.strategies as st from .ellipticcurve import PointEdwards, INFINITY, CurveEdTw from .eddsa import ( generator_ed25519, curve_ed25519, generator_ed448, curve_ed448, PrivateKey, PublicKey, ) from .ecdsa import generator_256, curve_256 from .errors import MalformedPointError from ._compat import a2b_hex, compat26_str class TestA2B_Hex(unittest.TestCase): def test_invalid_input(self): with self.assertRaises(ValueError): a2b_hex("abcdefghi") def test_ed25519_curve_compare(): assert curve_ed25519 != curve_256 def test_ed25519_and_ed448_compare(): assert curve_ed448 != curve_ed25519 def test_ed25519_and_custom_curve_compare(): a = CurveEdTw(curve_ed25519.p(), -curve_ed25519.a(), 1) assert curve_ed25519 != a def test_ed25519_and_almost_exact_curve_compare(): a = CurveEdTw(curve_ed25519.p(), curve_ed25519.a(), 1) assert curve_ed25519 != a def test_ed25519_and_same_curve_params(): a = CurveEdTw(curve_ed25519.p(), curve_ed25519.a(), curve_ed25519.d()) assert curve_ed25519 == a assert not (curve_ed25519 != a) def test_ed25519_contains_point(): g = generator_ed25519 assert curve_ed25519.contains_point(g.x(), g.y()) def test_ed25519_contains_point_bad(): assert not curve_ed25519.contains_point(1, 1) def test_ed25519_double(): a = generator_ed25519 z = a.double() assert isinstance(z, PointEdwards) x2 = int( "24727413235106541002554574571675588834622768167397638456726423" "682521233608206" ) y2 = int( "15549675580280190176352668710449542251549572066445060580507079" "593062643049417" ) b = PointEdwards(curve_ed25519, x2, y2, 1, x2 * y2) assert z == b assert a != b def test_ed25519_add_as_double(): a = generator_ed25519 z = a + a assert isinstance(z, PointEdwards) b = generator_ed25519.double() assert z == b def test_ed25519_double_infinity(): a = PointEdwards(curve_ed25519, 0, 1, 1, 0) z = a.double() assert z is INFINITY def test_ed25519_double_badly_encoded_infinity(): # invalid point, mostly to make instrumental happy a = PointEdwards(curve_ed25519, 1, 1, 1, 0) z = a.double() assert z is INFINITY def test_ed25519_eq_with_different_z(): x = generator_ed25519.x() y = generator_ed25519.y() p = curve_ed25519.p() a = PointEdwards(curve_ed25519, x * 2 % p, y * 2 % p, 2, x * y * 2 % p) b = PointEdwards(curve_ed25519, x * 3 % p, y * 3 % p, 3, x * y * 3 % p) assert a == b assert not (a != b) def test_ed25519_eq_against_infinity(): assert generator_ed25519 != INFINITY def test_ed25519_eq_encoded_infinity_against_infinity(): a = PointEdwards(curve_ed25519, 0, 1, 1, 0) assert a == INFINITY def test_ed25519_eq_bad_encode_of_infinity_against_infinity(): # technically incorrect encoding of the point at infinity, but we check # both X and T, so verify that just T==0 works a = PointEdwards(curve_ed25519, 1, 1, 1, 0) assert a == INFINITY def test_ed25519_eq_against_non_Edwards_point(): assert generator_ed25519 != generator_256 
def test_ed25519_eq_against_negated_point(): g = generator_ed25519 neg = PointEdwards(curve_ed25519, -g.x(), g.y(), 1, -g.x() * g.y()) assert g != neg def test_ed25519_eq_x_different_y(): # not points on the curve, but __eq__ doesn't care a = PointEdwards(curve_ed25519, 1, 1, 1, 1) b = PointEdwards(curve_ed25519, 1, 2, 1, 2) assert a != b def test_ed25519_test_normalisation_and_scaling(): x = generator_ed25519.x() y = generator_ed25519.y() p = curve_ed25519.p() a = PointEdwards(curve_ed25519, x * 11 % p, y * 11 % p, 11, x * y * 11 % p) assert a.x() == x assert a.y() == y a.scale() assert a.x() == x assert a.y() == y a.scale() # second execution should be a noop assert a.x() == x assert a.y() == y def test_ed25519_add_three_times(): a = generator_ed25519 z = a + a + a x3 = int( "468967334644549386571235445953867877890461982801326656862413" "21779790909858396" ) y3 = int( "832484377853344397649037712036920113830141722629755531674120" "2210403726505172" ) b = PointEdwards(curve_ed25519, x3, y3, 1, x3 * y3) assert z == b def test_ed25519_add_to_infinity(): # generator * (order-1) x1 = int( "427838232691226969392843410947554224151809796397784248136826" "78720006717057747" ) y1 = int( "463168356949264781694283940034751631413079938662562256157830" "33603165251855960" ) inf_m_1 = PointEdwards(curve_ed25519, x1, y1, 1, x1 * y1) inf = inf_m_1 + generator_ed25519 assert inf is INFINITY def test_ed25519_add_and_mul_equivalence(): g = generator_ed25519 assert g + g == g * 2 assert g + g + g == g * 3 def test_ed25519_add_literal_infinity(): g = generator_ed25519 z = g + INFINITY assert z == g def test_ed25519_add_infinity(): inf = PointEdwards(curve_ed25519, 0, 1, 1, 0) g = generator_ed25519 z = g + inf assert z == g z = inf + g assert z == g class TestEd25519(unittest.TestCase): def test_add_wrong_curves(self): with self.assertRaises(ValueError) as e: generator_ed25519 + generator_ed448 self.assertIn("different curve", str(e.exception)) def test_add_wrong_point_type(self): with self.assertRaises(ValueError) as e: generator_ed25519 + generator_256 self.assertIn("different curve", str(e.exception)) def test_ed25519_mul_to_order_min_1(): x1 = int( "427838232691226969392843410947554224151809796397784248136826" "78720006717057747" ) y1 = int( "463168356949264781694283940034751631413079938662562256157830" "33603165251855960" ) inf_m_1 = PointEdwards(curve_ed25519, x1, y1, 1, x1 * y1) assert generator_ed25519 * (generator_ed25519.order() - 1) == inf_m_1 def test_ed25519_mul_to_infinity(): assert generator_ed25519 * generator_ed25519.order() == INFINITY def test_ed25519_mul_to_infinity_plus_1(): g = generator_ed25519 assert g * (g.order() + 1) == g def test_ed25519_mul_and_add(): g = generator_ed25519 a = g * 128 b = g * 64 + g * 64 assert a == b def test_ed25519_mul_and_add_2(): g = generator_ed25519 a = g * 123 b = g * 120 + g * 3 assert a == b def test_ed25519_mul_infinity(): inf = PointEdwards(curve_ed25519, 0, 1, 1, 0) z = inf * 11 assert z == INFINITY def test_ed25519_mul_by_zero(): z = generator_ed25519 * 0 assert z == INFINITY def test_ed25519_mul_by_one(): z = generator_ed25519 * 1 assert z == generator_ed25519 def test_ed25519_mul_custom_point(): # verify that multiplication without order set works g = generator_ed25519 a = PointEdwards(curve_ed25519, g.x(), g.y(), 1, g.x() * g.y()) z = a * 11 assert z == g * 11 def test_ed25519_pickle(): g = generator_ed25519 assert pickle.loads(pickle.dumps(g)) == g def test_ed448_eq_against_different_curve(): assert generator_ed25519 != generator_ed448 def 
test_ed448_double(): g = generator_ed448 z = g.double() assert isinstance(z, PointEdwards) x2 = int( "4845591495304045936995492052586696895690942404582120401876" "6013278705691214670908136440114445572635086627683154494739" "7859048262938744149" ) y2 = int( "4940887598674337276743026725267350893505445523037277237461" "2648447308771911703729389009346215770388834286503647778745" "3078312060500281069" ) b = PointEdwards(curve_ed448, x2, y2, 1, x2 * y2) assert z == b assert g != b def test_ed448_add_as_double(): g = generator_ed448 z = g + g b = g.double() assert z == b def test_ed448_mul_as_double(): g = generator_ed448 z = g * 2 b = g.double() assert z == b def test_ed448_add_to_infinity(): # generator * (order - 1) x1 = int( "5022586839996825903617194737881084981068517190547539260353" "6473749366191269932473977736719082931859264751085238669719" "1187378895383117729" ) y1 = int( "2988192100784814926760179304439306734375440401540802420959" "2824137233150618983587600353687865541878473398230323350346" "2500531545062832660" ) inf_m_1 = PointEdwards(curve_ed448, x1, y1, 1, x1 * y1) inf = inf_m_1 + generator_ed448 assert inf is INFINITY def test_ed448_mul_to_infinity(): g = generator_ed448 inf = g * g.order() assert inf is INFINITY def test_ed448_mul_to_infinity_plus_1(): g = generator_ed448 z = g * (g.order() + 1) assert z == g def test_ed448_add_and_mul_equivalence(): g = generator_ed448 assert g + g == g * 2 assert g + g + g == g * 3 def test_ed25519_encode(): g = generator_ed25519 g_bytes = g.to_bytes() assert len(g_bytes) == 32 exp_bytes = ( b"\x58\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66" b"\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66" ) assert g_bytes == exp_bytes def test_ed25519_decode(): exp_bytes = ( b"\x58\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66" b"\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66" ) a = PointEdwards.from_bytes(curve_ed25519, exp_bytes) assert a == generator_ed25519 class TestEdwardsMalformed(unittest.TestCase): def test_invalid_point(self): exp_bytes = ( b"\x78\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66" b"\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66" ) with self.assertRaises(MalformedPointError): PointEdwards.from_bytes(curve_ed25519, exp_bytes) def test_invalid_length(self): exp_bytes = ( b"\x58\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66" b"\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66" b"\x66" ) with self.assertRaises(MalformedPointError) as e: PointEdwards.from_bytes(curve_ed25519, exp_bytes) self.assertIn("length", str(e.exception)) def test_ed448_invalid(self): exp_bytes = b"\xff" * 57 with self.assertRaises(MalformedPointError): PointEdwards.from_bytes(curve_ed448, exp_bytes) def test_ed448_encode(): g = generator_ed448 g_bytes = g.to_bytes() assert len(g_bytes) == 57 exp_bytes = ( b"\x14\xfa\x30\xf2\x5b\x79\x08\x98\xad\xc8\xd7\x4e\x2c\x13\xbd" b"\xfd\xc4\x39\x7c\xe6\x1c\xff\xd3\x3a\xd7\xc2\xa0\x05\x1e\x9c" b"\x78\x87\x40\x98\xa3\x6c\x73\x73\xea\x4b\x62\xc7\xc9\x56\x37" b"\x20\x76\x88\x24\xbc\xb6\x6e\x71\x46\x3f\x69\x00" ) assert g_bytes == exp_bytes def test_ed448_decode(): exp_bytes = ( b"\x14\xfa\x30\xf2\x5b\x79\x08\x98\xad\xc8\xd7\x4e\x2c\x13\xbd" b"\xfd\xc4\x39\x7c\xe6\x1c\xff\xd3\x3a\xd7\xc2\xa0\x05\x1e\x9c" b"\x78\x87\x40\x98\xa3\x6c\x73\x73\xea\x4b\x62\xc7\xc9\x56\x37" b"\x20\x76\x88\x24\xbc\xb6\x6e\x71\x46\x3f\x69\x00" ) a = PointEdwards.from_bytes(curve_ed448, exp_bytes) assert a == generator_ed448 class 
TestEdDSAEquality(unittest.TestCase): def test_equal_public_points(self): key1 = PublicKey(generator_ed25519, b"\x01" * 32) key2 = PublicKey(generator_ed25519, b"\x01" * 32) self.assertEqual(key1, key2) self.assertFalse(key1 != key2) def test_unequal_public_points(self): key1 = PublicKey(generator_ed25519, b"\x01" * 32) key2 = PublicKey(generator_ed25519, b"\x03" * 32) self.assertNotEqual(key1, key2) def test_unequal_to_string(self): key1 = PublicKey(generator_ed25519, b"\x01" * 32) key2 = b"\x01" * 32 self.assertNotEqual(key1, key2) def test_unequal_publickey_curves(self): key1 = PublicKey(generator_ed25519, b"\x01" * 32) key2 = PublicKey(generator_ed448, b"\x03" * 56 + b"\x00") self.assertNotEqual(key1, key2) self.assertTrue(key1 != key2) def test_equal_private_keys(self): key1 = PrivateKey(generator_ed25519, b"\x01" * 32) key2 = PrivateKey(generator_ed25519, b"\x01" * 32) self.assertEqual(key1, key2) self.assertFalse(key1 != key2) def test_unequal_private_keys(self): key1 = PrivateKey(generator_ed25519, b"\x01" * 32) key2 = PrivateKey(generator_ed25519, b"\x02" * 32) self.assertNotEqual(key1, key2) self.assertTrue(key1 != key2) def test_unequal_privatekey_to_string(self): key1 = PrivateKey(generator_ed25519, b"\x01" * 32) key2 = b"\x01" * 32 self.assertNotEqual(key1, key2) def test_unequal_privatekey_curves(self): key1 = PrivateKey(generator_ed25519, b"\x01" * 32) key2 = PrivateKey(generator_ed448, b"\x01" * 57) self.assertNotEqual(key1, key2) class TestInvalidEdDSAInputs(unittest.TestCase): def test_wrong_length_of_private_key(self): with self.assertRaises(ValueError): PrivateKey(generator_ed25519, b"\x01" * 31) def test_wrong_length_of_public_key(self): with self.assertRaises(ValueError): PublicKey(generator_ed25519, b"\x01" * 33) def test_wrong_cofactor_curve(self): ed_c = curve_ed25519 def _hash(data): return hashlib.new("sha512", compat26_str(data)).digest() curve = CurveEdTw(ed_c.p(), ed_c.a(), ed_c.d(), 1, _hash) g = generator_ed25519 fake_gen = PointEdwards(curve, g.x(), g.y(), 1, g.x() * g.y()) with self.assertRaises(ValueError) as e: PrivateKey(fake_gen, g.to_bytes()) self.assertIn("cofactor", str(e.exception)) def test_invalid_signature_length(self): key = PublicKey(generator_ed25519, b"\x01" * 32) with self.assertRaises(ValueError) as e: key.verify(b"", b"\x01" * 65) self.assertIn("length", str(e.exception)) def test_changing_public_key(self): key = PublicKey(generator_ed25519, b"\x01" * 32) g = key.point new_g = PointEdwards(curve_ed25519, g.x(), g.y(), 1, g.x() * g.y()) key.point = new_g self.assertEqual(g, key.point) def test_changing_public_key_to_different_point(self): key = PublicKey(generator_ed25519, b"\x01" * 32) with self.assertRaises(ValueError) as e: key.point = generator_ed25519 self.assertIn("coordinates", str(e.exception)) def test_invalid_s_value(self): key = PublicKey( generator_ed25519, b"\xd7\x5a\x98\x01\x82\xb1\x0a\xb7\xd5\x4b\xfe\xd3\xc9\x64\x07\x3a" b"\x0e\xe1\x72\xf3\xda\xa6\x23\x25\xaf\x02\x1a\x68\xf7\x07\x51\x1a", ) sig_valid = bytearray( b"\xe5\x56\x43\x00\xc3\x60\xac\x72\x90\x86\xe2\xcc\x80\x6e\x82\x8a" b"\x84\x87\x7f\x1e\xb8\xe5\xd9\x74\xd8\x73\xe0\x65\x22\x49\x01\x55" b"\x5f\xb8\x82\x15\x90\xa3\x3b\xac\xc6\x1e\x39\x70\x1c\xf9\xb4\x6b" b"\xd2\x5b\xf5\xf0\x59\x5b\xbe\x24\x65\x51\x41\x43\x8e\x7a\x10\x0b" ) self.assertTrue(key.verify(b"", sig_valid)) sig_invalid = bytearray(sig_valid) sig_invalid[-1] = 0xFF with self.assertRaises(ValueError): key.verify(b"", sig_invalid) def test_invalid_r_value(self): key = PublicKey( generator_ed25519, 
b"\xd7\x5a\x98\x01\x82\xb1\x0a\xb7\xd5\x4b\xfe\xd3\xc9\x64\x07\x3a" b"\x0e\xe1\x72\xf3\xda\xa6\x23\x25\xaf\x02\x1a\x68\xf7\x07\x51\x1a", ) sig_valid = bytearray( b"\xe5\x56\x43\x00\xc3\x60\xac\x72\x90\x86\xe2\xcc\x80\x6e\x82\x8a" b"\x84\x87\x7f\x1e\xb8\xe5\xd9\x74\xd8\x73\xe0\x65\x22\x49\x01\x55" b"\x5f\xb8\x82\x15\x90\xa3\x3b\xac\xc6\x1e\x39\x70\x1c\xf9\xb4\x6b" b"\xd2\x5b\xf5\xf0\x59\x5b\xbe\x24\x65\x51\x41\x43\x8e\x7a\x10\x0b" ) self.assertTrue(key.verify(b"", sig_valid)) sig_invalid = bytearray(sig_valid) sig_invalid[0] = 0xE0 with self.assertRaises(ValueError): key.verify(b"", sig_invalid) HYP_SETTINGS = dict() HYP_SETTINGS["max_examples"] = 10 @settings(**HYP_SETTINGS) @example(1) @example(5) # smallest multiple that requires changing sign of x @given(st.integers(min_value=1, max_value=int(generator_ed25519.order() - 1))) def test_ed25519_encode_decode(multiple): a = generator_ed25519 * multiple b = PointEdwards.from_bytes(curve_ed25519, a.to_bytes()) assert a == b @settings(**HYP_SETTINGS) @example(1) @example(2) # smallest multiple that requires changing the sign of x @given(st.integers(min_value=1, max_value=int(generator_ed448.order() - 1))) def test_ed448_encode_decode(multiple): a = generator_ed448 * multiple b = PointEdwards.from_bytes(curve_ed448, a.to_bytes()) assert a == b @settings(**HYP_SETTINGS) @example(1) @example(2) @given(st.integers(min_value=1, max_value=int(generator_ed25519.order()) - 1)) def test_ed25519_mul_precompute_vs_naf(multiple): """Compare multiplication with and without precomputation.""" g = generator_ed25519 new_g = PointEdwards(curve_ed25519, g.x(), g.y(), 1, g.x() * g.y()) assert g * multiple == multiple * new_g # Test vectors from RFC 8032 TEST_VECTORS = [ # TEST 1 ( generator_ed25519, "9d61b19deffd5a60ba844af492ec2cc4" "4449c5697b326919703bac031cae7f60", "d75a980182b10ab7d54bfed3c964073a" "0ee172f3daa62325af021a68f707511a", "", "e5564300c360ac729086e2cc806e828a" "84877f1eb8e5d974d873e06522490155" "5fb8821590a33bacc61e39701cf9b46b" "d25bf5f0595bbe24655141438e7a100b", ), # TEST 2 ( generator_ed25519, "4ccd089b28ff96da9db6c346ec114e0f" "5b8a319f35aba624da8cf6ed4fb8a6fb", "3d4017c3e843895a92b70aa74d1b7ebc" "9c982ccf2ec4968cc0cd55f12af4660c", "72", "92a009a9f0d4cab8720e820b5f642540" "a2b27b5416503f8fb3762223ebdb69da" "085ac1e43e15996e458f3613d0f11d8c" "387b2eaeb4302aeeb00d291612bb0c00", ), # TEST 3 ( generator_ed25519, "c5aa8df43f9f837bedb7442f31dcb7b1" "66d38535076f094b85ce3a2e0b4458f7", "fc51cd8e6218a1a38da47ed00230f058" "0816ed13ba3303ac5deb911548908025", "af82", "6291d657deec24024827e69c3abe01a3" "0ce548a284743a445e3680d7db5ac3ac" "18ff9b538d16f290ae67f760984dc659" "4a7c15e9716ed28dc027beceea1ec40a", ), # TEST 1024 ( generator_ed25519, "f5e5767cf153319517630f226876b86c" "8160cc583bc013744c6bf255f5cc0ee5", "278117fc144c72340f67d0f2316e8386" "ceffbf2b2428c9c51fef7c597f1d426e", "08b8b2b733424243760fe426a4b54908" "632110a66c2f6591eabd3345e3e4eb98" "fa6e264bf09efe12ee50f8f54e9f77b1" "e355f6c50544e23fb1433ddf73be84d8" "79de7c0046dc4996d9e773f4bc9efe57" "38829adb26c81b37c93a1b270b20329d" "658675fc6ea534e0810a4432826bf58c" "941efb65d57a338bbd2e26640f89ffbc" "1a858efcb8550ee3a5e1998bd177e93a" "7363c344fe6b199ee5d02e82d522c4fe" "ba15452f80288a821a579116ec6dad2b" "3b310da903401aa62100ab5d1a36553e" "06203b33890cc9b832f79ef80560ccb9" "a39ce767967ed628c6ad573cb116dbef" "efd75499da96bd68a8a97b928a8bbc10" "3b6621fcde2beca1231d206be6cd9ec7" "aff6f6c94fcd7204ed3455c68c83f4a4" "1da4af2b74ef5c53f1d8ac70bdcb7ed1" "85ce81bd84359d44254d95629e9855a9" 
"4a7c1958d1f8ada5d0532ed8a5aa3fb2" "d17ba70eb6248e594e1a2297acbbb39d" "502f1a8c6eb6f1ce22b3de1a1f40cc24" "554119a831a9aad6079cad88425de6bd" "e1a9187ebb6092cf67bf2b13fd65f270" "88d78b7e883c8759d2c4f5c65adb7553" "878ad575f9fad878e80a0c9ba63bcbcc" "2732e69485bbc9c90bfbd62481d9089b" "eccf80cfe2df16a2cf65bd92dd597b07" "07e0917af48bbb75fed413d238f5555a" "7a569d80c3414a8d0859dc65a46128ba" "b27af87a71314f318c782b23ebfe808b" "82b0ce26401d2e22f04d83d1255dc51a" "ddd3b75a2b1ae0784504df543af8969b" "e3ea7082ff7fc9888c144da2af58429e" "c96031dbcad3dad9af0dcbaaaf268cb8" "fcffead94f3c7ca495e056a9b47acdb7" "51fb73e666c6c655ade8297297d07ad1" "ba5e43f1bca32301651339e22904cc8c" "42f58c30c04aafdb038dda0847dd988d" "cda6f3bfd15c4b4c4525004aa06eeff8" "ca61783aacec57fb3d1f92b0fe2fd1a8" "5f6724517b65e614ad6808d6f6ee34df" "f7310fdc82aebfd904b01e1dc54b2927" "094b2db68d6f903b68401adebf5a7e08" "d78ff4ef5d63653a65040cf9bfd4aca7" "984a74d37145986780fc0b16ac451649" "de6188a7dbdf191f64b5fc5e2ab47b57" "f7f7276cd419c17a3ca8e1b939ae49e4" "88acba6b965610b5480109c8b17b80e1" "b7b750dfc7598d5d5011fd2dcc5600a3" "2ef5b52a1ecc820e308aa342721aac09" "43bf6686b64b2579376504ccc493d97e" "6aed3fb0f9cd71a43dd497f01f17c0e2" "cb3797aa2a2f256656168e6c496afc5f" "b93246f6b1116398a346f1a641f3b041" "e989f7914f90cc2c7fff357876e506b5" "0d334ba77c225bc307ba537152f3f161" "0e4eafe595f6d9d90d11faa933a15ef1" "369546868a7f3a45a96768d40fd9d034" "12c091c6315cf4fde7cb68606937380d" "b2eaaa707b4c4185c32eddcdd306705e" "4dc1ffc872eeee475a64dfac86aba41c" "0618983f8741c5ef68d3a101e8a3b8ca" "c60c905c15fc910840b94c00a0b9d0", "0aab4c900501b3e24d7cdf4663326a3a" "87df5e4843b2cbdb67cbf6e460fec350" "aa5371b1508f9f4528ecea23c436d94b" "5e8fcd4f681e30a6ac00a9704a188a03", ), # TEST SHA(abc) ( generator_ed25519, "833fe62409237b9d62ec77587520911e" "9a759cec1d19755b7da901b96dca3d42", "ec172b93ad5e563bf4932c70e1245034" "c35467ef2efd4d64ebf819683467e2bf", "ddaf35a193617abacc417349ae204131" "12e6fa4e89a97ea20a9eeee64b55d39a" "2192992a274fc1a836ba3c23a3feebbd" "454d4423643ce80e2a9ac94fa54ca49f", "dc2a4459e7369633a52b1bf277839a00" "201009a3efbf3ecb69bea2186c26b589" "09351fc9ac90b3ecfdfbc7c66431e030" "3dca179c138ac17ad9bef1177331a704", ), # Blank ( generator_ed448, "6c82a562cb808d10d632be89c8513ebf" "6c929f34ddfa8c9f63c9960ef6e348a3" "528c8a3fcc2f044e39a3fc5b94492f8f" "032e7549a20098f95b", "5fd7449b59b461fd2ce787ec616ad46a" "1da1342485a70e1f8a0ea75d80e96778" "edf124769b46c7061bd6783df1e50f6c" "d1fa1abeafe8256180", "", "533a37f6bbe457251f023c0d88f976ae" "2dfb504a843e34d2074fd823d41a591f" "2b233f034f628281f2fd7a22ddd47d78" "28c59bd0a21bfd3980ff0d2028d4b18a" "9df63e006c5d1c2d345b925d8dc00b41" "04852db99ac5c7cdda8530a113a0f4db" "b61149f05a7363268c71d95808ff2e65" "2600", ), # 1 octet ( generator_ed448, "c4eab05d357007c632f3dbb48489924d" "552b08fe0c353a0d4a1f00acda2c463a" "fbea67c5e8d2877c5e3bc397a659949e" "f8021e954e0a12274e", "43ba28f430cdff456ae531545f7ecd0a" "c834a55d9358c0372bfa0c6c6798c086" "6aea01eb00742802b8438ea4cb82169c" "235160627b4c3a9480", "03", "26b8f91727bd62897af15e41eb43c377" "efb9c610d48f2335cb0bd0087810f435" "2541b143c4b981b7e18f62de8ccdf633" "fc1bf037ab7cd779805e0dbcc0aae1cb" "cee1afb2e027df36bc04dcecbf154336" "c19f0af7e0a6472905e799f1953d2a0f" "f3348ab21aa4adafd1d234441cf807c0" "3a00", ), # 11 octets ( generator_ed448, "cd23d24f714274e744343237b93290f5" "11f6425f98e64459ff203e8985083ffd" "f60500553abc0e05cd02184bdb89c4cc" "d67e187951267eb328", "dcea9e78f35a1bf3499a831b10b86c90" "aac01cd84b67a0109b55a36e9328b1e3" "65fce161d71ce7131a543ea4cb5f7e9f" "1d8b00696447001400", 
"0c3e544074ec63b0265e0c", "1f0a8888ce25e8d458a21130879b840a" "9089d999aaba039eaf3e3afa090a09d3" "89dba82c4ff2ae8ac5cdfb7c55e94d5d" "961a29fe0109941e00b8dbdeea6d3b05" "1068df7254c0cdc129cbe62db2dc957d" "bb47b51fd3f213fb8698f064774250a5" "028961c9bf8ffd973fe5d5c206492b14" "0e00", ), # 12 octets ( generator_ed448, "258cdd4ada32ed9c9ff54e63756ae582" "fb8fab2ac721f2c8e676a72768513d93" "9f63dddb55609133f29adf86ec9929dc" "cb52c1c5fd2ff7e21b", "3ba16da0c6f2cc1f30187740756f5e79" "8d6bc5fc015d7c63cc9510ee3fd44adc" "24d8e968b6e46e6f94d19b945361726b" "d75e149ef09817f580", "64a65f3cdedcdd66811e2915", "7eeeab7c4e50fb799b418ee5e3197ff6" "bf15d43a14c34389b59dd1a7b1b85b4a" "e90438aca634bea45e3a2695f1270f07" "fdcdf7c62b8efeaf00b45c2c96ba457e" "b1a8bf075a3db28e5c24f6b923ed4ad7" "47c3c9e03c7079efb87cb110d3a99861" "e72003cbae6d6b8b827e4e6c143064ff" "3c00", ), # 13 octets ( generator_ed448, "7ef4e84544236752fbb56b8f31a23a10" "e42814f5f55ca037cdcc11c64c9a3b29" "49c1bb60700314611732a6c2fea98eeb" "c0266a11a93970100e", "b3da079b0aa493a5772029f0467baebe" "e5a8112d9d3a22532361da294f7bb381" "5c5dc59e176b4d9f381ca0938e13c6c0" "7b174be65dfa578e80", "64a65f3cdedcdd66811e2915e7", "6a12066f55331b6c22acd5d5bfc5d712" "28fbda80ae8dec26bdd306743c5027cb" "4890810c162c027468675ecf645a8317" "6c0d7323a2ccde2d80efe5a1268e8aca" "1d6fbc194d3f77c44986eb4ab4177919" "ad8bec33eb47bbb5fc6e28196fd1caf5" "6b4e7e0ba5519234d047155ac727a105" "3100", ), # 64 octets ( generator_ed448, "d65df341ad13e008567688baedda8e9d" "cdc17dc024974ea5b4227b6530e339bf" "f21f99e68ca6968f3cca6dfe0fb9f4fa" "b4fa135d5542ea3f01", "df9705f58edbab802c7f8363cfe5560a" "b1c6132c20a9f1dd163483a26f8ac53a" "39d6808bf4a1dfbd261b099bb03b3fb5" "0906cb28bd8a081f00", "bd0f6a3747cd561bdddf4640a332461a" "4a30a12a434cd0bf40d766d9c6d458e5" "512204a30c17d1f50b5079631f64eb31" "12182da3005835461113718d1a5ef944", "554bc2480860b49eab8532d2a533b7d5" "78ef473eeb58c98bb2d0e1ce488a98b1" "8dfde9b9b90775e67f47d4a1c3482058" "efc9f40d2ca033a0801b63d45b3b722e" "f552bad3b4ccb667da350192b61c508c" "f7b6b5adadc2c8d9a446ef003fb05cba" "5f30e88e36ec2703b349ca229c267083" "3900", ), # 256 octets ( generator_ed448, "2ec5fe3c17045abdb136a5e6a913e32a" "b75ae68b53d2fc149b77e504132d3756" "9b7e766ba74a19bd6162343a21c8590a" "a9cebca9014c636df5", "79756f014dcfe2079f5dd9e718be4171" "e2ef2486a08f25186f6bff43a9936b9b" "fe12402b08ae65798a3d81e22e9ec80e" "7690862ef3d4ed3a00", "15777532b0bdd0d1389f636c5f6b9ba7" "34c90af572877e2d272dd078aa1e567c" "fa80e12928bb542330e8409f31745041" "07ecd5efac61ae7504dabe2a602ede89" "e5cca6257a7c77e27a702b3ae39fc769" "fc54f2395ae6a1178cab4738e543072f" "c1c177fe71e92e25bf03e4ecb72f47b6" "4d0465aaea4c7fad372536c8ba516a60" "39c3c2a39f0e4d832be432dfa9a706a6" "e5c7e19f397964ca4258002f7c0541b5" "90316dbc5622b6b2a6fe7a4abffd9610" "5eca76ea7b98816af0748c10df048ce0" "12d901015a51f189f3888145c03650aa" "23ce894c3bd889e030d565071c59f409" "a9981b51878fd6fc110624dcbcde0bf7" "a69ccce38fabdf86f3bef6044819de11", "c650ddbb0601c19ca11439e1640dd931" "f43c518ea5bea70d3dcde5f4191fe53f" "00cf966546b72bcc7d58be2b9badef28" "743954e3a44a23f880e8d4f1cfce2d7a" "61452d26da05896f0a50da66a239a8a1" "88b6d825b3305ad77b73fbac0836ecc6" "0987fd08527c1a8e80d5823e65cafe2a" "3d00", ), # 1023 octets ( generator_ed448, "872d093780f5d3730df7c212664b37b8" "a0f24f56810daa8382cd4fa3f77634ec" "44dc54f1c2ed9bea86fafb7632d8be19" "9ea165f5ad55dd9ce8", "a81b2e8a70a5ac94ffdbcc9badfc3feb" "0801f258578bb114ad44ece1ec0e799d" "a08effb81c5d685c0c56f64eecaef8cd" "f11cc38737838cf400", "6ddf802e1aae4986935f7f981ba3f035" 
"1d6273c0a0c22c9c0e8339168e675412" "a3debfaf435ed651558007db4384b650" "fcc07e3b586a27a4f7a00ac8a6fec2cd" "86ae4bf1570c41e6a40c931db27b2faa" "15a8cedd52cff7362c4e6e23daec0fbc" "3a79b6806e316efcc7b68119bf46bc76" "a26067a53f296dafdbdc11c77f7777e9" "72660cf4b6a9b369a6665f02e0cc9b6e" "dfad136b4fabe723d2813db3136cfde9" "b6d044322fee2947952e031b73ab5c60" "3349b307bdc27bc6cb8b8bbd7bd32321" "9b8033a581b59eadebb09b3c4f3d2277" "d4f0343624acc817804728b25ab79717" "2b4c5c21a22f9c7839d64300232eb66e" "53f31c723fa37fe387c7d3e50bdf9813" "a30e5bb12cf4cd930c40cfb4e1fc6225" "92a49588794494d56d24ea4b40c89fc0" "596cc9ebb961c8cb10adde976a5d602b" "1c3f85b9b9a001ed3c6a4d3b1437f520" "96cd1956d042a597d561a596ecd3d173" "5a8d570ea0ec27225a2c4aaff26306d1" "526c1af3ca6d9cf5a2c98f47e1c46db9" "a33234cfd4d81f2c98538a09ebe76998" "d0d8fd25997c7d255c6d66ece6fa56f1" "1144950f027795e653008f4bd7ca2dee" "85d8e90f3dc315130ce2a00375a318c7" "c3d97be2c8ce5b6db41a6254ff264fa6" "155baee3b0773c0f497c573f19bb4f42" "40281f0b1f4f7be857a4e59d416c06b4" "c50fa09e1810ddc6b1467baeac5a3668" "d11b6ecaa901440016f389f80acc4db9" "77025e7f5924388c7e340a732e554440" "e76570f8dd71b7d640b3450d1fd5f041" "0a18f9a3494f707c717b79b4bf75c984" "00b096b21653b5d217cf3565c9597456" "f70703497a078763829bc01bb1cbc8fa" "04eadc9a6e3f6699587a9e75c94e5bab" "0036e0b2e711392cff0047d0d6b05bd2" "a588bc109718954259f1d86678a579a3" "120f19cfb2963f177aeb70f2d4844826" "262e51b80271272068ef5b3856fa8535" "aa2a88b2d41f2a0e2fda7624c2850272" "ac4a2f561f8f2f7a318bfd5caf969614" "9e4ac824ad3460538fdc25421beec2cc" "6818162d06bbed0c40a387192349db67" "a118bada6cd5ab0140ee273204f628aa" "d1c135f770279a651e24d8c14d75a605" "9d76b96a6fd857def5e0b354b27ab937" "a5815d16b5fae407ff18222c6d1ed263" "be68c95f32d908bd895cd76207ae7264" "87567f9a67dad79abec316f683b17f2d" "02bf07e0ac8b5bc6162cf94697b3c27c" "d1fea49b27f23ba2901871962506520c" "392da8b6ad0d99f7013fbc06c2c17a56" "9500c8a7696481c1cd33e9b14e40b82e" "79a5f5db82571ba97bae3ad3e0479515" "bb0e2b0f3bfcd1fd33034efc6245eddd" "7ee2086ddae2600d8ca73e214e8c2b0b" "db2b047c6a464a562ed77b73d2d841c4" "b34973551257713b753632efba348169" "abc90a68f42611a40126d7cb21b58695" "568186f7e569d2ff0f9e745d0487dd2e" "b997cafc5abf9dd102e62ff66cba87", "e301345a41a39a4d72fff8df69c98075" "a0cc082b802fc9b2b6bc503f926b65bd" "df7f4c8f1cb49f6396afc8a70abe6d8a" "ef0db478d4c6b2970076c6a0484fe76d" "76b3a97625d79f1ce240e7c576750d29" "5528286f719b413de9ada3e8eb78ed57" "3603ce30d8bb761785dc30dbc320869e" "1a00", ), ] @pytest.mark.parametrize( "generator,private_key,public_key,message,signature", TEST_VECTORS, ) def test_vectors(generator, private_key, public_key, message, signature): private_key = a2b_hex(private_key) public_key = a2b_hex(public_key) message = a2b_hex(message) signature = a2b_hex(signature) sig_key = PrivateKey(generator, private_key) ver_key = PublicKey(generator, public_key) assert sig_key.public_key().public_key() == ver_key.public_key() gen_sig = sig_key.sign(message) assert gen_sig == signature assert ver_key.verify(message, signature) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1626802567.0 ecdsa-0.18.0/src/ecdsa/test_ellipticcurve.py0000664005075200507520000001370514075604607020421 0ustar00hkariohkarioimport pytest try: import unittest2 as unittest except ImportError: import unittest from hypothesis import given, settings import hypothesis.strategies as st try: from hypothesis import HealthCheck HC_PRESENT = True except ImportError: # pragma: no cover HC_PRESENT = False from .numbertheory import inverse_mod from .ellipticcurve 
import CurveFp, INFINITY, Point HYP_SETTINGS = {} if HC_PRESENT: # pragma: no branch HYP_SETTINGS["suppress_health_check"] = [HealthCheck.too_slow] HYP_SETTINGS["deadline"] = 5000 # NIST Curve P-192: p = 6277101735386680763835789423207666416083908700390324961279 r = 6277101735386680763835789423176059013767194773182842284081 # s = 0x3045ae6fc8422f64ed579528d38120eae12196d5 # c = 0x3099d2bbbfcb2538542dcd5fb078b6ef5f3d6fe2c745de65 b = 0x64210519E59C80E70FA7E9AB72243049FEB8DEECC146B9B1 Gx = 0x188DA80EB03090F67CBF20EB43A18800F4FF0AFD82FF1012 Gy = 0x07192B95FFC8DA78631011ED6B24CDD573F977A11E794811 c192 = CurveFp(p, -3, b) p192 = Point(c192, Gx, Gy, r) c_23 = CurveFp(23, 1, 1) g_23 = Point(c_23, 13, 7, 7) HYP_SLOW_SETTINGS = dict(HYP_SETTINGS) HYP_SLOW_SETTINGS["max_examples"] = 10 @settings(**HYP_SLOW_SETTINGS) @given(st.integers(min_value=1, max_value=r + 1)) def test_p192_mult_tests(multiple): inv_m = inverse_mod(multiple, r) p1 = p192 * multiple assert p1 * inv_m == p192 def add_n_times(point, n): ret = INFINITY i = 0 while i <= n: yield ret ret = ret + point i += 1 # From X9.62 I.1 (p. 96): @pytest.mark.parametrize( "p, m, check", [(g_23, n, exp) for n, exp in enumerate(add_n_times(g_23, 8))], ids=["g_23 test with mult {0}".format(i) for i in range(9)], ) def test_add_and_mult_equivalence(p, m, check): assert p * m == check class TestCurve(unittest.TestCase): @classmethod def setUpClass(cls): cls.c_23 = CurveFp(23, 1, 1) def test_equality_curves(self): self.assertEqual(self.c_23, CurveFp(23, 1, 1)) def test_inequality_curves(self): c192 = CurveFp(p, -3, b) self.assertNotEqual(self.c_23, c192) def test_usability_in_a_hashed_collection_curves(self): {self.c_23: None} def test_hashability_curves(self): hash(self.c_23) def test_conflation_curves(self): ne1, ne2, ne3 = CurveFp(24, 1, 1), CurveFp(23, 2, 1), CurveFp(23, 1, 2) eq1, eq2, eq3 = CurveFp(23, 1, 1), CurveFp(23, 1, 1), self.c_23 self.assertEqual(len(set((c_23, eq1, eq2, eq3))), 1) self.assertEqual(len(set((c_23, ne1, ne2, ne3))), 4) self.assertDictEqual({c_23: None}, {eq1: None}) self.assertIn(eq2, {eq3: None}) class TestPoint(unittest.TestCase): @classmethod def setUpClass(cls): cls.c_23 = CurveFp(23, 1, 1) cls.g_23 = Point(cls.c_23, 13, 7, 7) p = 6277101735386680763835789423207666416083908700390324961279 r = 6277101735386680763835789423176059013767194773182842284081 # s = 0x3045ae6fc8422f64ed579528d38120eae12196d5 # c = 0x3099d2bbbfcb2538542dcd5fb078b6ef5f3d6fe2c745de65 b = 0x64210519E59C80E70FA7E9AB72243049FEB8DEECC146B9B1 Gx = 0x188DA80EB03090F67CBF20EB43A18800F4FF0AFD82FF1012 Gy = 0x07192B95FFC8DA78631011ED6B24CDD573F977A11E794811 cls.c192 = CurveFp(p, -3, b) cls.p192 = Point(cls.c192, Gx, Gy, r) def test_p192(self): # Checking against some sample computations presented # in X9.62: d = 651056770906015076056810763456358567190100156695615665659 Q = d * self.p192 self.assertEqual( Q.x(), 0x62B12D60690CDCF330BABAB6E69763B471F994DD702D16A5 ) k = 6140507067065001063065065565667405560006161556565665656654 R = k * self.p192 self.assertEqual( R.x(), 0x885052380FF147B734C330C43D39B2C4A89F29B0F749FEAD ) self.assertEqual( R.y(), 0x9CF9FA1CBEFEFB917747A3BB29C072B9289C2547884FD835 ) u1 = 2563697409189434185194736134579731015366492496392189760599 u2 = 6266643813348617967186477710235785849136406323338782220568 temp = u1 * self.p192 + u2 * Q self.assertEqual( temp.x(), 0x885052380FF147B734C330C43D39B2C4A89F29B0F749FEAD ) self.assertEqual( temp.y(), 0x9CF9FA1CBEFEFB917747A3BB29C072B9289C2547884FD835 ) def test_double_infinity(self): p1 = INFINITY p3 = 
p1.double() self.assertEqual(p1, p3) self.assertEqual(p3.x(), p1.x()) self.assertEqual(p3.y(), p3.y()) def test_double(self): x1, y1, x3, y3 = (3, 10, 7, 12) p1 = Point(self.c_23, x1, y1) p3 = p1.double() self.assertEqual(p3.x(), x3) self.assertEqual(p3.y(), y3) def test_multiply(self): x1, y1, m, x3, y3 = (3, 10, 2, 7, 12) p1 = Point(self.c_23, x1, y1) p3 = p1 * m self.assertEqual(p3.x(), x3) self.assertEqual(p3.y(), y3) # Trivial tests from X9.62 B.3: def test_add(self): """We expect that on curve c, (x1,y1) + (x2, y2 ) = (x3, y3).""" x1, y1, x2, y2, x3, y3 = (3, 10, 9, 7, 17, 20) p1 = Point(self.c_23, x1, y1) p2 = Point(self.c_23, x2, y2) p3 = p1 + p2 self.assertEqual(p3.x(), x3) self.assertEqual(p3.y(), y3) def test_add_as_double(self): """We expect that on curve c, (x1,y1) + (x2, y2 ) = (x3, y3).""" x1, y1, x2, y2, x3, y3 = (3, 10, 3, 10, 7, 12) p1 = Point(self.c_23, x1, y1) p2 = Point(self.c_23, x2, y2) p3 = p1 + p2 self.assertEqual(p3.x(), x3) self.assertEqual(p3.y(), y3) def test_equality_points(self): self.assertEqual(self.g_23, Point(self.c_23, 13, 7, 7)) def test_inequality_points(self): c = CurveFp(100, -3, 100) p = Point(c, 100, 100, 100) self.assertNotEqual(self.g_23, p) def test_inequality_points_diff_types(self): c = CurveFp(100, -3, 100) self.assertNotEqual(self.g_23, c) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1648836739.0 ecdsa-0.18.0/src/ecdsa/test_jacobi.py0000664005075200507520000004372414221640203016763 0ustar00hkariohkarioimport pickle try: import unittest2 as unittest except ImportError: import unittest import os import sys import signal import pytest import threading import platform import hypothesis.strategies as st from hypothesis import given, assume, settings, example from .ellipticcurve import CurveFp, PointJacobi, INFINITY from .ecdsa import ( generator_256, curve_256, generator_224, generator_brainpoolp160r1, curve_brainpoolp160r1, generator_112r2, ) from .numbertheory import inverse_mod from .util import randrange NO_OLD_SETTINGS = {} if sys.version_info > (2, 7): # pragma: no branch NO_OLD_SETTINGS["deadline"] = 5000 class TestJacobi(unittest.TestCase): def test___init__(self): curve = object() x = 2 y = 3 z = 1 order = 4 pj = PointJacobi(curve, x, y, z, order) self.assertEqual(pj.order(), order) self.assertIs(pj.curve(), curve) self.assertEqual(pj.x(), x) self.assertEqual(pj.y(), y) def test_add_with_different_curves(self): p_a = PointJacobi.from_affine(generator_256) p_b = PointJacobi.from_affine(generator_224) with self.assertRaises(ValueError): p_a + p_b def test_compare_different_curves(self): self.assertNotEqual(generator_256, generator_224) def test_equality_with_non_point(self): pj = PointJacobi.from_affine(generator_256) self.assertNotEqual(pj, "value") def test_conversion(self): pj = PointJacobi.from_affine(generator_256) pw = pj.to_affine() self.assertEqual(generator_256, pw) def test_single_double(self): pj = PointJacobi.from_affine(generator_256) pw = generator_256.double() pj = pj.double() self.assertEqual(pj.x(), pw.x()) self.assertEqual(pj.y(), pw.y()) def test_double_with_zero_point(self): pj = PointJacobi(curve_256, 0, 0, 1) pj = pj.double() self.assertIs(pj, INFINITY) def test_double_with_zero_equivalent_point(self): pj = PointJacobi(curve_256, 0, curve_256.p(), 1) pj = pj.double() self.assertIs(pj, INFINITY) def test_double_with_zero_equivalent_point_non_1_z(self): pj = PointJacobi(curve_256, 0, curve_256.p(), 2) pj = pj.double() self.assertIs(pj, INFINITY) def 
test_compare_with_affine_point(self): pj = PointJacobi.from_affine(generator_256) pa = pj.to_affine() self.assertEqual(pj, pa) self.assertEqual(pa, pj) def test_to_affine_with_zero_point(self): pj = PointJacobi(curve_256, 0, 0, 1) pa = pj.to_affine() self.assertIs(pa, INFINITY) def test_add_with_affine_point(self): pj = PointJacobi.from_affine(generator_256) pa = pj.to_affine() s = pj + pa self.assertEqual(s, pj.double()) def test_radd_with_affine_point(self): pj = PointJacobi.from_affine(generator_256) pa = pj.to_affine() s = pa + pj self.assertEqual(s, pj.double()) def test_add_with_infinity(self): pj = PointJacobi.from_affine(generator_256) s = pj + INFINITY self.assertEqual(s, pj) def test_add_zero_point_to_affine(self): pa = PointJacobi.from_affine(generator_256).to_affine() pj = PointJacobi(curve_256, 0, 0, 1) s = pj + pa self.assertIs(s, pa) def test_multiply_by_zero(self): pj = PointJacobi.from_affine(generator_256) pj = pj * 0 self.assertIs(pj, INFINITY) def test_zero_point_multiply_by_one(self): pj = PointJacobi(curve_256, 0, 0, 1) pj = pj * 1 self.assertIs(pj, INFINITY) def test_multiply_by_one(self): pj = PointJacobi.from_affine(generator_256) pw = generator_256 * 1 pj = pj * 1 self.assertEqual(pj.x(), pw.x()) self.assertEqual(pj.y(), pw.y()) def test_multiply_by_two(self): pj = PointJacobi.from_affine(generator_256) pw = generator_256 * 2 pj = pj * 2 self.assertEqual(pj.x(), pw.x()) self.assertEqual(pj.y(), pw.y()) def test_rmul_by_two(self): pj = PointJacobi.from_affine(generator_256) pw = generator_256 * 2 pj = 2 * pj self.assertEqual(pj, pw) def test_compare_non_zero_with_infinity(self): pj = PointJacobi.from_affine(generator_256) self.assertNotEqual(pj, INFINITY) def test_compare_zero_point_with_infinity(self): pj = PointJacobi(curve_256, 0, 0, 1) self.assertEqual(pj, INFINITY) def test_compare_double_with_multiply(self): pj = PointJacobi.from_affine(generator_256) dbl = pj.double() mlpl = pj * 2 self.assertEqual(dbl, mlpl) @settings(max_examples=10) @given( st.integers( min_value=0, max_value=int(generator_brainpoolp160r1.order()) ) ) def test_multiplications(self, mul): pj = PointJacobi.from_affine(generator_brainpoolp160r1) pw = pj.to_affine() * mul pj = pj * mul self.assertEqual((pj.x(), pj.y()), (pw.x(), pw.y())) self.assertEqual(pj, pw) @settings(max_examples=10) @given( st.integers( min_value=0, max_value=int(generator_brainpoolp160r1.order()) ) ) @example(0) @example(int(generator_brainpoolp160r1.order())) def test_precompute(self, mul): precomp = generator_brainpoolp160r1 self.assertTrue(precomp._PointJacobi__precompute) pj = PointJacobi.from_affine(generator_brainpoolp160r1) a = precomp * mul b = pj * mul self.assertEqual(a, b) @settings(max_examples=10) @given( st.integers( min_value=1, max_value=int(generator_brainpoolp160r1.order()) ), st.integers( min_value=1, max_value=int(generator_brainpoolp160r1.order()) ), ) @example(3, 3) def test_add_scaled_points(self, a_mul, b_mul): j_g = PointJacobi.from_affine(generator_brainpoolp160r1) a = PointJacobi.from_affine(j_g * a_mul) b = PointJacobi.from_affine(j_g * b_mul) c = a + b self.assertEqual(c, j_g * (a_mul + b_mul)) @settings(max_examples=10) @given( st.integers( min_value=1, max_value=int(generator_brainpoolp160r1.order()) ), st.integers( min_value=1, max_value=int(generator_brainpoolp160r1.order()) ), st.integers(min_value=1, max_value=int(curve_brainpoolp160r1.p() - 1)), ) def test_add_one_scaled_point(self, a_mul, b_mul, new_z): j_g = PointJacobi.from_affine(generator_brainpoolp160r1) a = 
PointJacobi.from_affine(j_g * a_mul) b = PointJacobi.from_affine(j_g * b_mul) p = curve_brainpoolp160r1.p() assume(inverse_mod(new_z, p)) new_zz = new_z * new_z % p b = PointJacobi( curve_brainpoolp160r1, b.x() * new_zz % p, b.y() * new_zz * new_z % p, new_z, ) c = a + b self.assertEqual(c, j_g * (a_mul + b_mul)) @settings(max_examples=10) @given( st.integers( min_value=1, max_value=int(generator_brainpoolp160r1.order()) ), st.integers( min_value=1, max_value=int(generator_brainpoolp160r1.order()) ), st.integers(min_value=1, max_value=int(curve_brainpoolp160r1.p() - 1)), ) @example(1, 1, 1) @example(3, 3, 3) @example(2, int(generator_brainpoolp160r1.order() - 2), 1) @example(2, int(generator_brainpoolp160r1.order() - 2), 3) def test_add_same_scale_points(self, a_mul, b_mul, new_z): j_g = PointJacobi.from_affine(generator_brainpoolp160r1) a = PointJacobi.from_affine(j_g * a_mul) b = PointJacobi.from_affine(j_g * b_mul) p = curve_brainpoolp160r1.p() assume(inverse_mod(new_z, p)) new_zz = new_z * new_z % p a = PointJacobi( curve_brainpoolp160r1, a.x() * new_zz % p, a.y() * new_zz * new_z % p, new_z, ) b = PointJacobi( curve_brainpoolp160r1, b.x() * new_zz % p, b.y() * new_zz * new_z % p, new_z, ) c = a + b self.assertEqual(c, j_g * (a_mul + b_mul)) def test_add_same_scale_points_static(self): j_g = generator_brainpoolp160r1 p = curve_brainpoolp160r1.p() a = j_g * 11 a.scale() z1 = 13 x = PointJacobi( curve_brainpoolp160r1, a.x() * z1**2 % p, a.y() * z1**3 % p, z1, ) y = PointJacobi( curve_brainpoolp160r1, a.x() * z1**2 % p, a.y() * z1**3 % p, z1, ) c = a + a self.assertEqual(c, x + y) @settings(max_examples=14) @given( st.integers( min_value=1, max_value=int(generator_brainpoolp160r1.order()) ), st.integers( min_value=1, max_value=int(generator_brainpoolp160r1.order()) ), st.lists( st.integers( min_value=1, max_value=int(curve_brainpoolp160r1.p() - 1) ), min_size=2, max_size=2, unique=True, ), ) @example(2, 2, [2, 1]) @example(2, 2, [2, 3]) @example(2, int(generator_brainpoolp160r1.order() - 2), [2, 3]) @example(2, int(generator_brainpoolp160r1.order() - 2), [2, 1]) def test_add_different_scale_points(self, a_mul, b_mul, new_z): j_g = PointJacobi.from_affine(generator_brainpoolp160r1) a = PointJacobi.from_affine(j_g * a_mul) b = PointJacobi.from_affine(j_g * b_mul) p = curve_brainpoolp160r1.p() assume(inverse_mod(new_z[0], p)) assume(inverse_mod(new_z[1], p)) new_zz0 = new_z[0] * new_z[0] % p new_zz1 = new_z[1] * new_z[1] % p a = PointJacobi( curve_brainpoolp160r1, a.x() * new_zz0 % p, a.y() * new_zz0 * new_z[0] % p, new_z[0], ) b = PointJacobi( curve_brainpoolp160r1, b.x() * new_zz1 % p, b.y() * new_zz1 * new_z[1] % p, new_z[1], ) c = a + b self.assertEqual(c, j_g * (a_mul + b_mul)) def test_add_different_scale_points_static(self): j_g = generator_brainpoolp160r1 p = curve_brainpoolp160r1.p() a = j_g * 11 a.scale() z1 = 13 x = PointJacobi( curve_brainpoolp160r1, a.x() * z1**2 % p, a.y() * z1**3 % p, z1, ) z2 = 29 y = PointJacobi( curve_brainpoolp160r1, a.x() * z2**2 % p, a.y() * z2**3 % p, z2, ) c = a + a self.assertEqual(c, x + y) def test_add_point_3_times(self): j_g = PointJacobi.from_affine(generator_256) self.assertEqual(j_g * 3, j_g + j_g + j_g) def test_mul_without_order(self): j_g = PointJacobi(curve_256, generator_256.x(), generator_256.y(), 1) self.assertEqual(j_g * generator_256.order(), INFINITY) def test_mul_add_inf(self): j_g = PointJacobi.from_affine(generator_256) self.assertEqual(j_g, j_g.mul_add(1, INFINITY, 1)) def test_mul_add_same(self): j_g = 
PointJacobi.from_affine(generator_256) self.assertEqual(j_g * 2, j_g.mul_add(1, j_g, 1)) def test_mul_add_precompute(self): j_g = PointJacobi.from_affine(generator_brainpoolp160r1, True) b = PointJacobi.from_affine(j_g * 255, True) self.assertEqual(j_g * 256, j_g + b) self.assertEqual(j_g * (5 + 255 * 7), j_g * 5 + b * 7) self.assertEqual(j_g * (5 + 255 * 7), j_g.mul_add(5, b, 7)) def test_mul_add_precompute_large(self): j_g = PointJacobi.from_affine(generator_brainpoolp160r1, True) b = PointJacobi.from_affine(j_g * 255, True) self.assertEqual(j_g * 256, j_g + b) self.assertEqual( j_g * (0xFF00 + 255 * 0xF0F0), j_g * 0xFF00 + b * 0xF0F0 ) self.assertEqual( j_g * (0xFF00 + 255 * 0xF0F0), j_g.mul_add(0xFF00, b, 0xF0F0) ) def test_mul_add_to_mul(self): j_g = PointJacobi.from_affine(generator_256) a = j_g * 3 b = j_g.mul_add(2, j_g, 1) self.assertEqual(a, b) def test_mul_add_differnt(self): j_g = PointJacobi.from_affine(generator_256) w_a = j_g * 2 self.assertEqual(j_g.mul_add(1, w_a, 1), j_g * 3) def test_mul_add_slightly_different(self): j_g = PointJacobi.from_affine(generator_256) w_a = j_g * 2 w_b = j_g * 3 self.assertEqual(w_a.mul_add(1, w_b, 3), w_a * 1 + w_b * 3) def test_mul_add(self): j_g = PointJacobi.from_affine(generator_256) w_a = generator_256 * 255 w_b = generator_256 * (0xA8 * 0xF0) j_b = j_g * 0xA8 ret = j_g.mul_add(255, j_b, 0xF0) self.assertEqual(ret.to_affine(), w_a + w_b) def test_mul_add_large(self): j_g = PointJacobi.from_affine(generator_256) b = PointJacobi.from_affine(j_g * 255) self.assertEqual(j_g * 256, j_g + b) self.assertEqual( j_g * (0xFF00 + 255 * 0xF0F0), j_g * 0xFF00 + b * 0xF0F0 ) self.assertEqual( j_g * (0xFF00 + 255 * 0xF0F0), j_g.mul_add(0xFF00, b, 0xF0F0) ) def test_mul_add_with_infinity_as_result(self): j_g = PointJacobi.from_affine(generator_256) order = generator_256.order() b = PointJacobi.from_affine(generator_256 * 256) self.assertEqual(j_g.mul_add(order % 256, b, order // 256), INFINITY) def test_mul_add_without_order(self): j_g = PointJacobi(curve_256, generator_256.x(), generator_256.y(), 1) order = generator_256.order() w_b = generator_256 * 34 w_b.scale() b = PointJacobi(curve_256, w_b.x(), w_b.y(), 1) self.assertEqual(j_g.mul_add(order % 34, b, order // 34), INFINITY) def test_mul_add_with_doubled_negation_of_itself(self): j_g = PointJacobi.from_affine(generator_256 * 17) dbl_neg = 2 * (-j_g) self.assertEqual(j_g.mul_add(4, dbl_neg, 2), INFINITY) def test_equality(self): pj1 = PointJacobi(curve=CurveFp(23, 1, 1, 1), x=2, y=3, z=1, order=1) pj2 = PointJacobi(curve=CurveFp(23, 1, 1, 1), x=2, y=3, z=1, order=1) self.assertEqual(pj1, pj2) def test_equality_with_invalid_object(self): j_g = PointJacobi.from_affine(generator_256) self.assertNotEqual(j_g, 12) def test_equality_with_wrong_curves(self): p_a = PointJacobi.from_affine(generator_256) p_b = PointJacobi.from_affine(generator_224) self.assertNotEqual(p_a, p_b) def test_pickle(self): pj = PointJacobi(curve=CurveFp(23, 1, 1, 1), x=2, y=3, z=1, order=1) self.assertEqual(pickle.loads(pickle.dumps(pj)), pj) @settings(**NO_OLD_SETTINGS) @given(st.integers(min_value=1, max_value=10)) def test_multithreading(self, thread_num): # ensure that generator's precomputation table is filled generator_112r2 * 2 # create a fresh point that doesn't have a filled precomputation table gen = generator_112r2 gen = PointJacobi(gen.curve(), gen.x(), gen.y(), 1, gen.order(), True) self.assertEqual(gen._PointJacobi__precompute, []) def runner(generator): order = generator.order() for _ in range(10): generator * 
randrange(order) threads = [] for _ in range(thread_num): threads.append(threading.Thread(target=runner, args=(gen,))) for t in threads: t.start() runner(gen) for t in threads: t.join() self.assertEqual( gen._PointJacobi__precompute, generator_112r2._PointJacobi__precompute, ) @pytest.mark.skipif( platform.system() == "Windows", reason="there are no signals on Windows", ) def test_multithreading_with_interrupts(self): thread_num = 10 # ensure that generator's precomputation table is filled generator_112r2 * 2 # create a fresh point that doesn't have a filled precomputation table gen = generator_112r2 gen = PointJacobi(gen.curve(), gen.x(), gen.y(), 1, gen.order(), True) self.assertEqual(gen._PointJacobi__precompute, []) def runner(generator): order = generator.order() for _ in range(50): generator * randrange(order) def interrupter(barrier_start, barrier_end, lock_exit): # wait until MainThread can handle KeyboardInterrupt barrier_start.release() barrier_end.acquire() os.kill(os.getpid(), signal.SIGINT) lock_exit.release() threads = [] for _ in range(thread_num): threads.append(threading.Thread(target=runner, args=(gen,))) barrier_start = threading.Lock() barrier_start.acquire() barrier_end = threading.Lock() barrier_end.acquire() lock_exit = threading.Lock() lock_exit.acquire() threads.append( threading.Thread( target=interrupter, args=(barrier_start, barrier_end, lock_exit), ) ) for t in threads: t.start() with self.assertRaises(KeyboardInterrupt): # signal to interrupter that we can now handle the signal barrier_start.acquire() barrier_end.release() runner(gen) # use the lock to ensure we never go past the scope of # assertRaises before the os.kill is called lock_exit.acquire() for t in threads: t.join() self.assertEqual( gen._PointJacobi__precompute, generator_112r2._PointJacobi__precompute, ) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1650905591.0 ecdsa-0.18.0/src/ecdsa/test_keys.py0000664005075200507520000010010114231550767016506 0ustar00hkariohkariotry: import unittest2 as unittest except ImportError: import unittest try: buffer except NameError: buffer = memoryview import os import array import pytest import hashlib from .keys import VerifyingKey, SigningKey, MalformedPointError from .der import ( unpem, UnexpectedDER, encode_sequence, encode_oid, encode_bitstring, ) from .util import ( sigencode_string, sigencode_der, sigencode_strings, sigdecode_string, sigdecode_der, sigdecode_strings, ) from .curves import NIST256p, Curve, BRAINPOOLP160r1, Ed25519, Ed448 from .ellipticcurve import Point, PointJacobi, CurveFp, INFINITY from .ecdsa import generator_brainpoolp160r1 class TestVerifyingKeyFromString(unittest.TestCase): """ Verify that ecdsa.keys.VerifyingKey.from_string() can be used with bytes-like objects """ @classmethod def setUpClass(cls): cls.key_bytes = ( b"\x04L\xa2\x95\xdb\xc7Z\xd7\x1f\x93\nz\xcf\x97\xcf" b"\xd7\xc2\xd9o\xfe8}X!\xae\xd4\xfah\xfa^\rpI\xba\xd1" b"Y\xfb\x92xa\xebo+\x9cG\xfav\xca" ) cls.vk = VerifyingKey.from_string(cls.key_bytes) def test_bytes(self): self.assertIsNotNone(self.vk) self.assertIsInstance(self.vk, VerifyingKey) self.assertEqual( self.vk.pubkey.point.x(), 105419898848891948935835657980914000059957975659675736097, ) self.assertEqual( self.vk.pubkey.point.y(), 4286866841217412202667522375431381222214611213481632495306, ) def test_bytes_memoryview(self): vk = VerifyingKey.from_string(buffer(self.key_bytes)) self.assertEqual(self.vk.to_string(), vk.to_string()) def test_bytearray(self): vk = 
VerifyingKey.from_string(bytearray(self.key_bytes)) self.assertEqual(self.vk.to_string(), vk.to_string()) def test_bytesarray_memoryview(self): vk = VerifyingKey.from_string(buffer(bytearray(self.key_bytes))) self.assertEqual(self.vk.to_string(), vk.to_string()) def test_array_array_of_bytes(self): arr = array.array("B", self.key_bytes) vk = VerifyingKey.from_string(arr) self.assertEqual(self.vk.to_string(), vk.to_string()) def test_array_array_of_bytes_memoryview(self): arr = array.array("B", self.key_bytes) vk = VerifyingKey.from_string(buffer(arr)) self.assertEqual(self.vk.to_string(), vk.to_string()) def test_array_array_of_ints(self): arr = array.array("I", self.key_bytes) vk = VerifyingKey.from_string(arr) self.assertEqual(self.vk.to_string(), vk.to_string()) def test_array_array_of_ints_memoryview(self): arr = array.array("I", self.key_bytes) vk = VerifyingKey.from_string(buffer(arr)) self.assertEqual(self.vk.to_string(), vk.to_string()) def test_bytes_uncompressed(self): vk = VerifyingKey.from_string(b"\x04" + self.key_bytes) self.assertEqual(self.vk.to_string(), vk.to_string()) def test_bytearray_uncompressed(self): vk = VerifyingKey.from_string(bytearray(b"\x04" + self.key_bytes)) self.assertEqual(self.vk.to_string(), vk.to_string()) def test_bytes_compressed(self): vk = VerifyingKey.from_string(b"\x02" + self.key_bytes[:24]) self.assertEqual(self.vk.to_string(), vk.to_string()) def test_bytearray_compressed(self): vk = VerifyingKey.from_string(bytearray(b"\x02" + self.key_bytes[:24])) self.assertEqual(self.vk.to_string(), vk.to_string()) class TestVerifyingKeyFromDer(unittest.TestCase): """ Verify that ecdsa.keys.VerifyingKey.from_der() can be used with bytes-like objects. """ @classmethod def setUpClass(cls): prv_key_str = ( "-----BEGIN EC PRIVATE KEY-----\n" "MF8CAQEEGF7IQgvW75JSqULpiQQ8op9WH6Uldw6xxaAKBggqhkjOPQMBAaE0AzIA\n" "BLiBd9CE7xf15FY5QIAoNg+fWbSk1yZOYtoGUdzkejWkxbRc9RWTQjqLVXucIJnz\n" "bA==\n" "-----END EC PRIVATE KEY-----\n" ) key_str = ( "-----BEGIN PUBLIC KEY-----\n" "MEkwEwYHKoZIzj0CAQYIKoZIzj0DAQEDMgAEuIF30ITvF/XkVjlAgCg2D59ZtKTX\n" "Jk5i2gZR3OR6NaTFtFz1FZNCOotVe5wgmfNs\n" "-----END PUBLIC KEY-----\n" ) cls.key_pem = key_str cls.key_bytes = unpem(key_str) assert isinstance(cls.key_bytes, bytes) cls.vk = VerifyingKey.from_pem(key_str) cls.sk = SigningKey.from_pem(prv_key_str) key_str = ( "-----BEGIN PUBLIC KEY-----\n" "MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAE4H3iRbG4TSrsSRb/gusPQB/4YcN8\n" "Poqzgjau4kfxBPyZimeRfuY/9g/wMmPuhGl4BUve51DsnKJFRr8psk0ieA==\n" "-----END PUBLIC KEY-----\n" ) cls.vk2 = VerifyingKey.from_pem(key_str) cls.sk2 = SigningKey.generate(vk.curve) def test_load_key_with_explicit_parameters(self): pub_key_str = ( "-----BEGIN PUBLIC KEY-----\n" "MIIBSzCCAQMGByqGSM49AgEwgfcCAQEwLAYHKoZIzj0BAQIhAP////8AAAABAAAA\n" "AAAAAAAAAAAA////////////////MFsEIP////8AAAABAAAAAAAAAAAAAAAA////\n" "///////////8BCBaxjXYqjqT57PrvVV2mIa8ZR0GsMxTsPY7zjw+J9JgSwMVAMSd\n" "NgiG5wSTamZ44ROdJreBn36QBEEEaxfR8uEsQkf4vOblY6RA8ncDfYEt6zOg9KE5\n" "RdiYwpZP40Li/hp/m47n60p8D54WK84zV2sxXs7LtkBoN79R9QIhAP////8AAAAA\n" "//////////+85vqtpxeehPO5ysL8YyVRAgEBA0IABIr1UkgYs5jmbFc7it1/YI2X\n" "T//IlaEjMNZft1owjqpBYH2ErJHk4U5Pp4WvWq1xmHwIZlsH7Ig4KmefCfR6SmU=\n" "-----END PUBLIC KEY-----" ) pk = VerifyingKey.from_pem(pub_key_str) pk_exp = VerifyingKey.from_string( b"\x04\x8a\xf5\x52\x48\x18\xb3\x98\xe6\x6c\x57\x3b\x8a\xdd\x7f" b"\x60\x8d\x97\x4f\xff\xc8\x95\xa1\x23\x30\xd6\x5f\xb7\x5a\x30" b"\x8e\xaa\x41\x60\x7d\x84\xac\x91\xe4\xe1\x4e\x4f\xa7\x85\xaf" 
b"\x5a\xad\x71\x98\x7c\x08\x66\x5b\x07\xec\x88\x38\x2a\x67\x9f" b"\x09\xf4\x7a\x4a\x65", curve=NIST256p, ) self.assertEqual(pk, pk_exp) def test_load_key_with_explicit_with_explicit_disabled(self): pub_key_str = ( "-----BEGIN PUBLIC KEY-----\n" "MIIBSzCCAQMGByqGSM49AgEwgfcCAQEwLAYHKoZIzj0BAQIhAP////8AAAABAAAA\n" "AAAAAAAAAAAA////////////////MFsEIP////8AAAABAAAAAAAAAAAAAAAA////\n" "///////////8BCBaxjXYqjqT57PrvVV2mIa8ZR0GsMxTsPY7zjw+J9JgSwMVAMSd\n" "NgiG5wSTamZ44ROdJreBn36QBEEEaxfR8uEsQkf4vOblY6RA8ncDfYEt6zOg9KE5\n" "RdiYwpZP40Li/hp/m47n60p8D54WK84zV2sxXs7LtkBoN79R9QIhAP////8AAAAA\n" "//////////+85vqtpxeehPO5ysL8YyVRAgEBA0IABIr1UkgYs5jmbFc7it1/YI2X\n" "T//IlaEjMNZft1owjqpBYH2ErJHk4U5Pp4WvWq1xmHwIZlsH7Ig4KmefCfR6SmU=\n" "-----END PUBLIC KEY-----" ) with self.assertRaises(UnexpectedDER): VerifyingKey.from_pem( pub_key_str, valid_curve_encodings=["named_curve"] ) def test_load_key_with_disabled_format(self): with self.assertRaises(MalformedPointError) as e: VerifyingKey.from_der(self.key_bytes, valid_encodings=["raw"]) self.assertIn("enabled (raw) encodings", str(e.exception)) def test_custom_hashfunc(self): vk = VerifyingKey.from_der(self.key_bytes, hashlib.sha256) self.assertIs(vk.default_hashfunc, hashlib.sha256) def test_from_pem_with_custom_hashfunc(self): vk = VerifyingKey.from_pem(self.key_pem, hashlib.sha256) self.assertIs(vk.default_hashfunc, hashlib.sha256) def test_bytes(self): vk = VerifyingKey.from_der(self.key_bytes) self.assertEqual(self.vk.to_string(), vk.to_string()) def test_bytes_memoryview(self): vk = VerifyingKey.from_der(buffer(self.key_bytes)) self.assertEqual(self.vk.to_string(), vk.to_string()) def test_bytearray(self): vk = VerifyingKey.from_der(bytearray(self.key_bytes)) self.assertEqual(self.vk.to_string(), vk.to_string()) def test_bytesarray_memoryview(self): vk = VerifyingKey.from_der(buffer(bytearray(self.key_bytes))) self.assertEqual(self.vk.to_string(), vk.to_string()) def test_array_array_of_bytes(self): arr = array.array("B", self.key_bytes) vk = VerifyingKey.from_der(arr) self.assertEqual(self.vk.to_string(), vk.to_string()) def test_array_array_of_bytes_memoryview(self): arr = array.array("B", self.key_bytes) vk = VerifyingKey.from_der(buffer(arr)) self.assertEqual(self.vk.to_string(), vk.to_string()) def test_equality_on_verifying_keys(self): self.assertEqual(self.vk, self.sk.get_verifying_key()) def test_inequality_on_verifying_keys(self): self.assertNotEqual(self.vk, self.vk2) def test_inequality_on_verifying_keys_not_implemented(self): self.assertNotEqual(self.vk, None) def test_VerifyingKey_inequality_on_same_curve(self): self.assertNotEqual(self.vk, self.sk2.verifying_key) def test_SigningKey_inequality_on_same_curve(self): self.assertNotEqual(self.sk, self.sk2) def test_inequality_on_wrong_types(self): self.assertNotEqual(self.vk, self.sk) def test_from_public_point_old(self): pj = self.vk.pubkey.point point = Point(pj.curve(), pj.x(), pj.y()) vk = VerifyingKey.from_public_point(point, self.vk.curve) self.assertEqual(vk, self.vk) def test_ed25519_VerifyingKey_repr__(self): sk = SigningKey.from_string(Ed25519.generator.to_bytes(), Ed25519) string = repr(sk.verifying_key) self.assertEqual( "VerifyingKey.from_string(" "bytearray(b'K\\x0c\\xfbZH\\x8e\\x8c\\x8c\\x07\\xee\\xda\\xfb" "\\xe1\\x97\\xcd\\x90\\x18\\x02\\x15h]\\xfe\\xbe\\xcbB\\xba\\xe6r" "\\x10\\xae\\xf1P'), Ed25519, None)", string, ) def test_edwards_from_public_point(self): point = Ed25519.generator with self.assertRaises(ValueError) as e: VerifyingKey.from_public_point(point, Ed25519) 
self.assertIn("incompatible with Edwards", str(e.exception)) def test_edwards_precompute_no_side_effect(self): sk = SigningKey.from_string(Ed25519.generator.to_bytes(), Ed25519) vk = sk.verifying_key vk2 = VerifyingKey.from_string(vk.to_string(), Ed25519) vk.precompute() self.assertEqual(vk, vk2) def test_parse_malfomed_eddsa_der_pubkey(self): der_str = encode_sequence( encode_sequence(encode_oid(*Ed25519.oid)), encode_bitstring(bytes(Ed25519.generator.to_bytes()), 0), encode_bitstring(b"\x00", 0), ) with self.assertRaises(UnexpectedDER) as e: VerifyingKey.from_der(der_str) self.assertIn("trailing junk after public key", str(e.exception)) def test_edwards_from_public_key_recovery(self): with self.assertRaises(ValueError) as e: VerifyingKey.from_public_key_recovery(b"", b"", Ed25519) self.assertIn("unsupported for Edwards", str(e.exception)) def test_edwards_from_public_key_recovery_with_digest(self): with self.assertRaises(ValueError) as e: VerifyingKey.from_public_key_recovery_with_digest( b"", b"", Ed25519 ) self.assertIn("unsupported for Edwards", str(e.exception)) def test_load_ed25519_from_pem(self): vk_pem = ( "-----BEGIN PUBLIC KEY-----\n" "MCowBQYDK2VwAyEAIwBQ0NZkIiiO41WJfm5BV42u3kQm7lYnvIXmCy8qy2U=\n" "-----END PUBLIC KEY-----\n" ) vk = VerifyingKey.from_pem(vk_pem) self.assertIsInstance(vk.curve, Curve) self.assertIs(vk.curve, Ed25519) vk_str = ( b"\x23\x00\x50\xd0\xd6\x64\x22\x28\x8e\xe3\x55\x89\x7e\x6e\x41\x57" b"\x8d\xae\xde\x44\x26\xee\x56\x27\xbc\x85\xe6\x0b\x2f\x2a\xcb\x65" ) vk_2 = VerifyingKey.from_string(vk_str, Ed25519) self.assertEqual(vk, vk_2) def test_export_ed255_to_pem(self): vk_str = ( b"\x23\x00\x50\xd0\xd6\x64\x22\x28\x8e\xe3\x55\x89\x7e\x6e\x41\x57" b"\x8d\xae\xde\x44\x26\xee\x56\x27\xbc\x85\xe6\x0b\x2f\x2a\xcb\x65" ) vk = VerifyingKey.from_string(vk_str, Ed25519) vk_pem = ( b"-----BEGIN PUBLIC KEY-----\n" b"MCowBQYDK2VwAyEAIwBQ0NZkIiiO41WJfm5BV42u3kQm7lYnvIXmCy8qy2U=\n" b"-----END PUBLIC KEY-----\n" ) self.assertEqual(vk_pem, vk.to_pem()) def test_ed25519_export_import(self): sk = SigningKey.generate(Ed25519) vk = sk.verifying_key vk2 = VerifyingKey.from_pem(vk.to_pem()) self.assertEqual(vk, vk2) def test_ed25519_sig_verify(self): vk_pem = ( "-----BEGIN PUBLIC KEY-----\n" "MCowBQYDK2VwAyEAIwBQ0NZkIiiO41WJfm5BV42u3kQm7lYnvIXmCy8qy2U=\n" "-----END PUBLIC KEY-----\n" ) vk = VerifyingKey.from_pem(vk_pem) data = b"data\n" # signature created by OpenSSL 3.0.0 beta1 sig = ( b"\x64\x47\xab\x6a\x33\xcd\x79\x45\xad\x98\x11\x6c\xb9\xf2\x20\xeb" b"\x90\xd6\x50\xe3\xc7\x8f\x9f\x60\x10\xec\x75\xe0\x2f\x27\xd3\x96" b"\xda\xe8\x58\x7f\xe0\xfe\x46\x5c\x81\xef\x50\xec\x29\x9f\xae\xd5" b"\xad\x46\x3c\x91\x68\x83\x4d\xea\x8d\xa8\x19\x04\x04\x79\x03\x0b" ) self.assertTrue(vk.verify(sig, data)) def test_ed448_from_pem(self): pem_str = ( "-----BEGIN PUBLIC KEY-----\n" "MEMwBQYDK2VxAzoAeQtetSu7CMEzE+XWB10Bg47LCA0giNikOxHzdp+tZ/eK/En0\n" "dTdYD2ll94g58MhSnBiBQB9A1MMA\n" "-----END PUBLIC KEY-----\n" ) vk = VerifyingKey.from_pem(pem_str) self.assertIsInstance(vk.curve, Curve) self.assertIs(vk.curve, Ed448) vk_str = ( b"\x79\x0b\x5e\xb5\x2b\xbb\x08\xc1\x33\x13\xe5\xd6\x07\x5d\x01\x83" b"\x8e\xcb\x08\x0d\x20\x88\xd8\xa4\x3b\x11\xf3\x76\x9f\xad\x67\xf7" b"\x8a\xfc\x49\xf4\x75\x37\x58\x0f\x69\x65\xf7\x88\x39\xf0\xc8\x52" b"\x9c\x18\x81\x40\x1f\x40\xd4\xc3\x00" ) vk2 = VerifyingKey.from_string(vk_str, Ed448) self.assertEqual(vk, vk2) def test_ed448_to_pem(self): vk_str = ( b"\x79\x0b\x5e\xb5\x2b\xbb\x08\xc1\x33\x13\xe5\xd6\x07\x5d\x01\x83" 
b"\x8e\xcb\x08\x0d\x20\x88\xd8\xa4\x3b\x11\xf3\x76\x9f\xad\x67\xf7" b"\x8a\xfc\x49\xf4\x75\x37\x58\x0f\x69\x65\xf7\x88\x39\xf0\xc8\x52" b"\x9c\x18\x81\x40\x1f\x40\xd4\xc3\x00" ) vk = VerifyingKey.from_string(vk_str, Ed448) vk_pem = ( b"-----BEGIN PUBLIC KEY-----\n" b"MEMwBQYDK2VxAzoAeQtetSu7CMEzE+XWB10Bg47LCA0giNikOxHzdp+tZ/eK/En0\n" b"dTdYD2ll94g58MhSnBiBQB9A1MMA\n" b"-----END PUBLIC KEY-----\n" ) self.assertEqual(vk_pem, vk.to_pem()) def test_ed448_export_import(self): sk = SigningKey.generate(Ed448) vk = sk.verifying_key vk2 = VerifyingKey.from_pem(vk.to_pem()) self.assertEqual(vk, vk2) def test_ed448_sig_verify(self): pem_str = ( "-----BEGIN PUBLIC KEY-----\n" "MEMwBQYDK2VxAzoAeQtetSu7CMEzE+XWB10Bg47LCA0giNikOxHzdp+tZ/eK/En0\n" "dTdYD2ll94g58MhSnBiBQB9A1MMA\n" "-----END PUBLIC KEY-----\n" ) vk = VerifyingKey.from_pem(pem_str) data = b"data\n" # signature created by OpenSSL 3.0.0 beta1 sig = ( b"\x68\xed\x2c\x70\x35\x22\xca\x1c\x35\x03\xf3\xaa\x51\x33\x3d\x00" b"\xc0\xae\xb0\x54\xc5\xdc\x7f\x6f\x30\x57\xb4\x1d\xcb\xe9\xec\xfa" b"\xc8\x45\x3e\x51\xc1\xcb\x60\x02\x6a\xd0\x43\x11\x0b\x5f\x9b\xfa" b"\x32\x88\xb2\x38\x6b\xed\xac\x09\x00\x78\xb1\x7b\x5d\x7e\xf8\x16" b"\x31\xdd\x1b\x3f\x98\xa0\xce\x19\xe7\xd8\x1c\x9f\x30\xac\x2f\xd4" b"\x1e\x55\xbf\x21\x98\xf6\x4c\x8c\xbe\x81\xa5\x2d\x80\x4c\x62\x53" b"\x91\xd5\xee\x03\x30\xc6\x17\x66\x4b\x9e\x0c\x8d\x40\xd0\xad\xae" b"\x0a\x00" ) self.assertTrue(vk.verify(sig, data)) class TestSigningKey(unittest.TestCase): """ Verify that ecdsa.keys.SigningKey.from_der() can be used with bytes-like objects. """ @classmethod def setUpClass(cls): prv_key_str = ( "-----BEGIN EC PRIVATE KEY-----\n" "MF8CAQEEGF7IQgvW75JSqULpiQQ8op9WH6Uldw6xxaAKBggqhkjOPQMBAaE0AzIA\n" "BLiBd9CE7xf15FY5QIAoNg+fWbSk1yZOYtoGUdzkejWkxbRc9RWTQjqLVXucIJnz\n" "bA==\n" "-----END EC PRIVATE KEY-----\n" ) cls.sk1 = SigningKey.from_pem(prv_key_str) prv_key_str = ( "-----BEGIN PRIVATE KEY-----\n" "MG8CAQAwEwYHKoZIzj0CAQYIKoZIzj0DAQEEVTBTAgEBBBheyEIL1u+SUqlC6YkE\n" "PKKfVh+lJXcOscWhNAMyAAS4gXfQhO8X9eRWOUCAKDYPn1m0pNcmTmLaBlHc5Ho1\n" "pMW0XPUVk0I6i1V7nCCZ82w=\n" "-----END PRIVATE KEY-----\n" ) cls.sk1_pkcs8 = SigningKey.from_pem(prv_key_str) prv_key_str = ( "-----BEGIN EC PRIVATE KEY-----\n" "MHcCAQEEIKlL2EAm5NPPZuXwxRf4nXMk0A80y6UUbiQ17be/qFhRoAoGCCqGSM49\n" "AwEHoUQDQgAE4H3iRbG4TSrsSRb/gusPQB/4YcN8Poqzgjau4kfxBPyZimeRfuY/\n" "9g/wMmPuhGl4BUve51DsnKJFRr8psk0ieA==\n" "-----END EC PRIVATE KEY-----\n" ) cls.sk2 = SigningKey.from_pem(prv_key_str) def test_decoding_explicit_curve_parameters(self): prv_key_str = ( "-----BEGIN PRIVATE KEY-----\n" "MIIBeQIBADCCAQMGByqGSM49AgEwgfcCAQEwLAYHKoZIzj0BAQIhAP////8AAAAB\n" "AAAAAAAAAAAAAAAA////////////////MFsEIP////8AAAABAAAAAAAAAAAAAAAA\n" "///////////////8BCBaxjXYqjqT57PrvVV2mIa8ZR0GsMxTsPY7zjw+J9JgSwMV\n" "AMSdNgiG5wSTamZ44ROdJreBn36QBEEEaxfR8uEsQkf4vOblY6RA8ncDfYEt6zOg\n" "9KE5RdiYwpZP40Li/hp/m47n60p8D54WK84zV2sxXs7LtkBoN79R9QIhAP////8A\n" "AAAA//////////+85vqtpxeehPO5ysL8YyVRAgEBBG0wawIBAQQgIXtREfUmR16r\n" "ZbmvDGD2lAEFPZa2DLPyz0czSja58yChRANCAASK9VJIGLOY5mxXO4rdf2CNl0//\n" "yJWhIzDWX7daMI6qQWB9hKyR5OFOT6eFr1qtcZh8CGZbB+yIOCpnnwn0ekpl\n" "-----END PRIVATE KEY-----\n" ) sk = SigningKey.from_pem(prv_key_str) sk2 = SigningKey.from_string( b"\x21\x7b\x51\x11\xf5\x26\x47\x5e\xab\x65\xb9\xaf\x0c\x60\xf6" b"\x94\x01\x05\x3d\x96\xb6\x0c\xb3\xf2\xcf\x47\x33\x4a\x36\xb9" b"\xf3\x20", curve=NIST256p, ) self.assertEqual(sk, sk2) def test_decoding_explicit_curve_parameters_with_explicit_disabled(self): prv_key_str = ( "-----BEGIN PRIVATE KEY-----\n" 
"MIIBeQIBADCCAQMGByqGSM49AgEwgfcCAQEwLAYHKoZIzj0BAQIhAP////8AAAAB\n" "AAAAAAAAAAAAAAAA////////////////MFsEIP////8AAAABAAAAAAAAAAAAAAAA\n" "///////////////8BCBaxjXYqjqT57PrvVV2mIa8ZR0GsMxTsPY7zjw+J9JgSwMV\n" "AMSdNgiG5wSTamZ44ROdJreBn36QBEEEaxfR8uEsQkf4vOblY6RA8ncDfYEt6zOg\n" "9KE5RdiYwpZP40Li/hp/m47n60p8D54WK84zV2sxXs7LtkBoN79R9QIhAP////8A\n" "AAAA//////////+85vqtpxeehPO5ysL8YyVRAgEBBG0wawIBAQQgIXtREfUmR16r\n" "ZbmvDGD2lAEFPZa2DLPyz0czSja58yChRANCAASK9VJIGLOY5mxXO4rdf2CNl0//\n" "yJWhIzDWX7daMI6qQWB9hKyR5OFOT6eFr1qtcZh8CGZbB+yIOCpnnwn0ekpl\n" "-----END PRIVATE KEY-----\n" ) with self.assertRaises(UnexpectedDER): SigningKey.from_pem( prv_key_str, valid_curve_encodings=["named_curve"] ) def test_equality_on_signing_keys(self): sk = SigningKey.from_secret_exponent( self.sk1.privkey.secret_multiplier, self.sk1.curve ) self.assertEqual(self.sk1, sk) self.assertEqual(self.sk1_pkcs8, sk) def test_verify_with_empty_message(self): sig = self.sk1.sign(b"") self.assertTrue(sig) vk = self.sk1.verifying_key self.assertTrue(vk.verify(sig, b"")) def test_verify_with_precompute(self): sig = self.sk1.sign(b"message") vk = self.sk1.verifying_key vk.precompute() self.assertTrue(vk.verify(sig, b"message")) def test_compare_verifying_key_with_precompute(self): vk1 = self.sk1.verifying_key vk1.precompute() vk2 = self.sk1_pkcs8.verifying_key self.assertEqual(vk1, vk2) def test_verify_with_lazy_precompute(self): sig = self.sk2.sign(b"other message") vk = self.sk2.verifying_key vk.precompute(lazy=True) self.assertTrue(vk.verify(sig, b"other message")) def test_inequality_on_signing_keys(self): self.assertNotEqual(self.sk1, self.sk2) def test_inequality_on_signing_keys_not_implemented(self): self.assertNotEqual(self.sk1, None) def test_ed25519_from_pem(self): pem_str = ( "-----BEGIN PRIVATE KEY-----\n" "MC4CAQAwBQYDK2VwBCIEIDS6x9FO1PG8T4xIPg8Zd0z8uL6sVGZFEZrX17gHC/XU\n" "-----END PRIVATE KEY-----\n" ) sk = SigningKey.from_pem(pem_str) sk_str = SigningKey.from_string( b"\x34\xBA\xC7\xD1\x4E\xD4\xF1\xBC\x4F\x8C\x48\x3E\x0F\x19\x77\x4C" b"\xFC\xB8\xBE\xAC\x54\x66\x45\x11\x9A\xD7\xD7\xB8\x07\x0B\xF5\xD4", Ed25519, ) self.assertEqual(sk, sk_str) def test_ed25519_to_pem(self): sk = SigningKey.from_string( b"\x34\xBA\xC7\xD1\x4E\xD4\xF1\xBC\x4F\x8C\x48\x3E\x0F\x19\x77\x4C" b"\xFC\xB8\xBE\xAC\x54\x66\x45\x11\x9A\xD7\xD7\xB8\x07\x0B\xF5\xD4", Ed25519, ) pem_str = ( b"-----BEGIN PRIVATE KEY-----\n" b"MC4CAQAwBQYDK2VwBCIEIDS6x9FO1PG8T4xIPg8Zd0z8uL6sVGZFEZrX17gHC/XU\n" b"-----END PRIVATE KEY-----\n" ) self.assertEqual(sk.to_pem(format="pkcs8"), pem_str) def test_ed25519_to_and_from_pem(self): sk = SigningKey.generate(Ed25519) decoded = SigningKey.from_pem(sk.to_pem(format="pkcs8")) self.assertEqual(sk, decoded) def test_ed448_from_pem(self): pem_str = ( "-----BEGIN PRIVATE KEY-----\n" "MEcCAQAwBQYDK2VxBDsEOTyFuXqFLXgJlV8uDqcOw9nG4IqzLiZ/i5NfBDoHPzmP\n" "OP0JMYaLGlTzwovmvCDJ2zLaezu9NLz9aQ==\n" "-----END PRIVATE KEY-----\n" ) sk = SigningKey.from_pem(pem_str) sk_str = SigningKey.from_string( b"\x3C\x85\xB9\x7A\x85\x2D\x78\x09\x95\x5F\x2E\x0E\xA7\x0E\xC3\xD9" b"\xC6\xE0\x8A\xB3\x2E\x26\x7F\x8B\x93\x5F\x04\x3A\x07\x3F\x39\x8F" b"\x38\xFD\x09\x31\x86\x8B\x1A\x54\xF3\xC2\x8B\xE6\xBC\x20\xC9\xDB" b"\x32\xDA\x7B\x3B\xBD\x34\xBC\xFD\x69", Ed448, ) self.assertEqual(sk, sk_str) def test_ed448_to_pem(self): sk = SigningKey.from_string( b"\x3C\x85\xB9\x7A\x85\x2D\x78\x09\x95\x5F\x2E\x0E\xA7\x0E\xC3\xD9" b"\xC6\xE0\x8A\xB3\x2E\x26\x7F\x8B\x93\x5F\x04\x3A\x07\x3F\x39\x8F" b"\x38\xFD\x09\x31\x86\x8B\x1A\x54\xF3\xC2\x8B\xE6\xBC\x20\xC9\xDB" 
b"\x32\xDA\x7B\x3B\xBD\x34\xBC\xFD\x69", Ed448, ) pem_str = ( b"-----BEGIN PRIVATE KEY-----\n" b"MEcCAQAwBQYDK2VxBDsEOTyFuXqFLXgJlV8uDqcOw9nG4IqzLiZ/i5NfBDoHPzmP\n" b"OP0JMYaLGlTzwovmvCDJ2zLaezu9NLz9aQ==\n" b"-----END PRIVATE KEY-----\n" ) self.assertEqual(sk.to_pem(format="pkcs8"), pem_str) def test_ed448_encode_decode(self): sk = SigningKey.generate(Ed448) decoded = SigningKey.from_pem(sk.to_pem(format="pkcs8")) self.assertEqual(decoded, sk) class TestTrivialCurve(unittest.TestCase): @classmethod def setUpClass(cls): # To test what happens with r or s in signing happens to be zero we # need to find a scalar that creates one of the points on a curve that # has x coordinate equal to zero. # Even for secp112r2 curve that's non trivial so use this toy # curve, for which we can iterate over all points quickly curve = CurveFp(163, 84, 58) gen = PointJacobi(curve, 2, 87, 1, 167, generator=True) cls.toy_curve = Curve("toy_p8", curve, gen, (1, 2, 0)) cls.sk = SigningKey.from_secret_exponent( 140, cls.toy_curve, hashfunc=hashlib.sha1, ) def test_generator_sanity(self): gen = self.toy_curve.generator self.assertEqual(gen * gen.order(), INFINITY) def test_public_key_sanity(self): self.assertEqual(self.sk.verifying_key.to_string(), b"\x98\x1e") def test_deterministic_sign(self): sig = self.sk.sign_deterministic(b"message") self.assertEqual(sig, b"-.") self.assertTrue(self.sk.verifying_key.verify(sig, b"message")) def test_deterministic_sign_random_message(self): msg = os.urandom(32) sig = self.sk.sign_deterministic(msg) self.assertEqual(len(sig), 2) self.assertTrue(self.sk.verifying_key.verify(sig, msg)) def test_deterministic_sign_that_rises_R_zero_error(self): # the raised RSZeroError is caught and handled internally by # sign_deterministic methods msg = b"\x00\x4f" sig = self.sk.sign_deterministic(msg) self.assertEqual(sig, b"\x36\x9e") self.assertTrue(self.sk.verifying_key.verify(sig, msg)) def test_deterministic_sign_that_rises_S_zero_error(self): msg = b"\x01\x6d" sig = self.sk.sign_deterministic(msg) self.assertEqual(sig, b"\x49\x6c") self.assertTrue(self.sk.verifying_key.verify(sig, msg)) # test VerifyingKey.verify() prv_key_str = ( "-----BEGIN EC PRIVATE KEY-----\n" "MF8CAQEEGF7IQgvW75JSqULpiQQ8op9WH6Uldw6xxaAKBggqhkjOPQMBAaE0AzIA\n" "BLiBd9CE7xf15FY5QIAoNg+fWbSk1yZOYtoGUdzkejWkxbRc9RWTQjqLVXucIJnz\n" "bA==\n" "-----END EC PRIVATE KEY-----\n" ) key_bytes = unpem(prv_key_str) assert isinstance(key_bytes, bytes) sk = SigningKey.from_der(key_bytes) vk = sk.verifying_key data = ( b"some string for signing" b"contents don't really matter" b"but do include also some crazy values: " b"\x00\x01\t\r\n\x00\x00\x00\xff\xf0" ) assert len(data) % 4 == 0 sha1 = hashlib.sha1() sha1.update(data) data_hash = sha1.digest() assert isinstance(data_hash, bytes) sig_raw = sk.sign(data, sigencode=sigencode_string) assert isinstance(sig_raw, bytes) sig_der = sk.sign(data, sigencode=sigencode_der) assert isinstance(sig_der, bytes) sig_strings = sk.sign(data, sigencode=sigencode_strings) assert isinstance(sig_strings[0], bytes) verifiers = [] for modifier, fun in [ ("bytes", lambda x: x), ("bytes memoryview", lambda x: buffer(x)), ("bytearray", lambda x: bytearray(x)), ("bytearray memoryview", lambda x: buffer(bytearray(x))), ("array.array of bytes", lambda x: array.array("B", x)), ("array.array of bytes memoryview", lambda x: buffer(array.array("B", x))), ("array.array of ints", lambda x: array.array("I", x)), ("array.array of ints memoryview", lambda x: buffer(array.array("I", x))), ]: if "ints" in modifier: conv = 
lambda x: x else: conv = fun for sig_format, signature, decoder, mod_apply in [ ("raw", sig_raw, sigdecode_string, lambda x: conv(x)), ("der", sig_der, sigdecode_der, lambda x: conv(x)), ( "strings", sig_strings, sigdecode_strings, lambda x: tuple(conv(i) for i in x), ), ]: for method_name, vrf_mthd, vrf_data in [ ("verify", vk.verify, data), ("verify_digest", vk.verify_digest, data_hash), ]: verifiers.append( pytest.param( signature, decoder, mod_apply, fun, vrf_mthd, vrf_data, id="{2}-{0}-{1}".format(modifier, sig_format, method_name), ) ) @pytest.mark.parametrize( "signature,decoder,mod_apply,fun,vrf_mthd,vrf_data", verifiers ) def test_VerifyingKey_verify( signature, decoder, mod_apply, fun, vrf_mthd, vrf_data ): sig = mod_apply(signature) assert vrf_mthd(sig, fun(vrf_data), sigdecode=decoder) # test SigningKey.from_string() prv_key_bytes = ( b"^\xc8B\x0b\xd6\xef\x92R\xa9B\xe9\x89\x04<\xa2" b"\x9fV\x1f\xa5%w\x0e\xb1\xc5" ) assert len(prv_key_bytes) == 24 converters = [] for modifier, convert in [ ("bytes", lambda x: x), ("bytes memoryview", buffer), ("bytearray", bytearray), ("bytearray memoryview", lambda x: buffer(bytearray(x))), ("array.array of bytes", lambda x: array.array("B", x)), ("array.array of bytes memoryview", lambda x: buffer(array.array("B", x))), ("array.array of ints", lambda x: array.array("I", x)), ("array.array of ints memoryview", lambda x: buffer(array.array("I", x))), ]: converters.append(pytest.param(convert, id=modifier)) @pytest.mark.parametrize("convert", converters) def test_SigningKey_from_string(convert): key = convert(prv_key_bytes) sk = SigningKey.from_string(key) assert sk.to_string() == prv_key_bytes # test SigningKey.from_der() prv_key_str = ( "-----BEGIN EC PRIVATE KEY-----\n" "MF8CAQEEGF7IQgvW75JSqULpiQQ8op9WH6Uldw6xxaAKBggqhkjOPQMBAaE0AzIA\n" "BLiBd9CE7xf15FY5QIAoNg+fWbSk1yZOYtoGUdzkejWkxbRc9RWTQjqLVXucIJnz\n" "bA==\n" "-----END EC PRIVATE KEY-----\n" ) key_bytes = unpem(prv_key_str) assert isinstance(key_bytes, bytes) # last two converters are for array.array of ints, those require input # that's multiple of 4, which no curve we support produces @pytest.mark.parametrize("convert", converters[:-2]) def test_SigningKey_from_der(convert): key = convert(key_bytes) sk = SigningKey.from_der(key) assert sk.to_string() == prv_key_bytes # test SigningKey.sign_deterministic() extra_entropy = b"\x0a\x0b\x0c\x0d\x0e\x0f\x10\x11" @pytest.mark.parametrize("convert", converters) def test_SigningKey_sign_deterministic(convert): sig = sk.sign_deterministic( convert(data), extra_entropy=convert(extra_entropy) ) vk.verify(sig, data) # test SigningKey.sign_digest_deterministic() @pytest.mark.parametrize("convert", converters) def test_SigningKey_sign_digest_deterministic(convert): sig = sk.sign_digest_deterministic( convert(data_hash), extra_entropy=convert(extra_entropy) ) vk.verify(sig, data) @pytest.mark.parametrize("convert", converters) def test_SigningKey_sign(convert): sig = sk.sign(convert(data)) vk.verify(sig, data) @pytest.mark.parametrize("convert", converters) def test_SigningKey_sign_digest(convert): sig = sk.sign_digest(convert(data_hash)) vk.verify(sig, data) def test_SigningKey_with_unlikely_value(): sk = SigningKey.from_secret_exponent(NIST256p.order - 1, curve=NIST256p) vk = sk.verifying_key sig = sk.sign(b"hello") assert vk.verify(sig, b"hello") def test_SigningKey_with_custom_curve_old_point(): generator = generator_brainpoolp160r1 generator = Point( generator.curve(), generator.x(), generator.y(), generator.order(), ) curve = Curve( 
"BRAINPOOLP160r1", generator.curve(), generator, (1, 3, 36, 3, 3, 2, 8, 1, 1, 1), ) sk = SigningKey.from_secret_exponent(12, curve) sk2 = SigningKey.from_secret_exponent(12, BRAINPOOLP160r1) assert sk.privkey == sk2.privkey def test_VerifyingKey_inequality_with_different_curves(): sk1 = SigningKey.from_secret_exponent(2, BRAINPOOLP160r1) sk2 = SigningKey.from_secret_exponent(2, NIST256p) assert sk1.verifying_key != sk2.verifying_key def test_VerifyingKey_inequality_with_different_secret_points(): sk1 = SigningKey.from_secret_exponent(2, BRAINPOOLP160r1) sk2 = SigningKey.from_secret_exponent(3, BRAINPOOLP160r1) assert sk1.verifying_key != sk2.verifying_key def test_SigningKey_from_pem_pkcs8v2_EdDSA(): pem = """-----BEGIN PRIVATE KEY----- MFMCAQEwBQYDK2VwBCIEICc2F2ag1n1QP0jY+g9qWx5sDkx0s/HdNi3cSRHw+zsI oSMDIQA+HQ2xCif8a/LMWR2m5HaCm5I2pKe/cc8OiRANMHxjKQ== -----END PRIVATE KEY-----""" sk = SigningKey.from_pem(pem) assert sk.curve == Ed25519 ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1650905591.0 ecdsa-0.18.0/src/ecdsa/test_malformed_sigs.py0000664005075200507520000002515114231550767020541 0ustar00hkariohkariofrom __future__ import with_statement, division import hashlib try: from hashlib import algorithms_available except ImportError: # pragma: no cover algorithms_available = [ "md5", "sha1", "sha224", "sha256", "sha384", "sha512", ] # skip algorithms broken by change to OpenSSL 3.0 and early versions # of hashlib that list algorithms that require the legacy provider to work # https://bugs.python.org/issue38820 algorithms_available = [ i for i in algorithms_available if i not in ("mdc2", "md2", "md4", "whirlpool", "ripemd160") ] from functools import partial import pytest import sys import hypothesis.strategies as st from hypothesis import note, assume, given, settings, example from .keys import SigningKey from .keys import BadSignatureError from .util import sigencode_der, sigencode_string from .util import sigdecode_der, sigdecode_string from .curves import curves from .der import ( encode_integer, encode_bitstring, encode_octet_string, encode_oid, encode_sequence, encode_constructed, ) from .ellipticcurve import CurveEdTw example_data = b"some data to sign" """Since the data is hashed for processing, really any string will do.""" hash_and_size = [ (name, hashlib.new(name).digest_size) for name in algorithms_available ] """Pairs of hash names and their output sizes. 
Needed for pairing with curves as we don't support hashes bigger than order sizes of curves.""" keys_and_sigs = [] """Name of the curve+hash combination, VerifyingKey and DER signature.""" # for hypothesis strategy shrinking we want smallest curves and hashes first for curve in sorted(curves, key=lambda x: x.baselen): for hash_alg in [ name for name, size in sorted(hash_and_size, key=lambda x: x[1]) if 0 < size <= curve.baselen ]: sk = SigningKey.generate( curve, hashfunc=partial(hashlib.new, hash_alg) ) keys_and_sigs.append( ( "{0} {1}".format(curve, hash_alg), sk.verifying_key, sk.sign(example_data, sigencode=sigencode_der), ) ) # first make sure that the signatures can be verified @pytest.mark.parametrize( "verifying_key,signature", [pytest.param(vk, sig, id=name) for name, vk, sig in keys_and_sigs], ) def test_signatures(verifying_key, signature): assert verifying_key.verify( signature, example_data, sigdecode=sigdecode_der ) @st.composite def st_fuzzed_sig(draw, keys_and_sigs): """ Hypothesis strategy that generates pairs of VerifyingKey and malformed signatures created by fuzzing of a valid signature. """ name, verifying_key, old_sig = draw(st.sampled_from(keys_and_sigs)) note("Configuration: {0}".format(name)) sig = bytearray(old_sig) # decide which bytes should be removed to_remove = draw( st.lists(st.integers(min_value=0, max_value=len(sig) - 1), unique=True) ) to_remove.sort() for i in reversed(to_remove): del sig[i] note("Remove bytes: {0}".format(to_remove)) # decide which bytes of the original signature should be changed if sig: # pragma: no branch xors = draw( st.dictionaries( st.integers(min_value=0, max_value=len(sig) - 1), st.integers(min_value=1, max_value=255), ) ) for i, val in xors.items(): sig[i] ^= val note("xors: {0}".format(xors)) # decide where new data should be inserted insert_pos = draw(st.integers(min_value=0, max_value=len(sig))) # NIST521p signature is about 140 bytes long, test slightly longer insert_data = draw(st.binary(max_size=256)) sig = sig[:insert_pos] + insert_data + sig[insert_pos:] note( "Inserted at position {0} bytes: {1!r}".format(insert_pos, insert_data) ) sig = bytes(sig) # make sure that there was performed at least one mutation on the data assume(to_remove or xors or insert_data) # and that the mutations didn't cancel each-other out assume(sig != old_sig) return verifying_key, sig params = {} # not supported in hypothesis 2.0.0 if sys.version_info >= (2, 7): # pragma: no branch from hypothesis import HealthCheck # deadline=5s because NIST521p are slow to verify params["deadline"] = 5000 params["suppress_health_check"] = [ HealthCheck.data_too_large, HealthCheck.filter_too_much, HealthCheck.too_slow, ] slow_params = dict(params) slow_params["max_examples"] = 10 @settings(**params) @given(st_fuzzed_sig(keys_and_sigs)) def test_fuzzed_der_signatures(args): verifying_key, sig = args with pytest.raises(BadSignatureError): verifying_key.verify(sig, example_data, sigdecode=sigdecode_der) @st.composite def st_random_der_ecdsa_sig_value(draw): """ Hypothesis strategy for selecting random values and encoding them to ECDSA-Sig-Value object:: ECDSA-Sig-Value ::= SEQUENCE { r INTEGER, s INTEGER } """ name, verifying_key, _ = draw(st.sampled_from(keys_and_sigs)) note("Configuration: {0}".format(name)) order = int(verifying_key.curve.order) # the encode_integer doesn't support negative numbers, would be nice # to generate them too, but we have coverage for remove_integer() # verifying that it doesn't accept them, so meh. 
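# Stand-alone illustration (not part of the test suite) of the property this
# module fuzzes: a well-formed DER ECDSA-Sig-Value carrying bogus values must
# be rejected with BadSignatureError.  (r, s) = (0, 0) is used here because
# zero is never a valid signature component.
from ecdsa.keys import SigningKey, BadSignatureError
from ecdsa.curves import NIST192p
from ecdsa.der import encode_sequence, encode_integer
from ecdsa.util import sigdecode_der

_vk_fuzz = SigningKey.generate(curve=NIST192p).get_verifying_key()
_bogus = encode_sequence(encode_integer(0), encode_integer(0))
try:
    _vk_fuzz.verify(_bogus, b"some data to sign", sigdecode=sigdecode_der)
except BadSignatureError:
    pass  # expected: the bogus values must not verify
else:
    raise AssertionError("bogus DER signature was accepted")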
# Test all numbers around the ones that can show up (around order) # way smaller and slightly bigger r = draw( st.integers(min_value=0, max_value=order << 4) | st.integers(min_value=order >> 2, max_value=order + 1) ) s = draw( st.integers(min_value=0, max_value=order << 4) | st.integers(min_value=order >> 2, max_value=order + 1) ) sig = encode_sequence(encode_integer(r), encode_integer(s)) return verifying_key, sig @settings(**slow_params) @given(st_random_der_ecdsa_sig_value()) def test_random_der_ecdsa_sig_value(params): """ Check if random values encoded in ECDSA-Sig-Value structure are rejected as signature. """ verifying_key, sig = params with pytest.raises(BadSignatureError): verifying_key.verify(sig, example_data, sigdecode=sigdecode_der) def st_der_integer(*args, **kwargs): """ Hypothesis strategy that returns a random positive integer as DER INTEGER. Parameters are passed to hypothesis.strategy.integer. """ if "min_value" not in kwargs: # pragma: no branch kwargs["min_value"] = 0 return st.builds(encode_integer, st.integers(*args, **kwargs)) @st.composite def st_der_bit_string(draw, *args, **kwargs): """ Hypothesis strategy that returns a random DER BIT STRING. Parameters are passed to hypothesis.strategy.binary. """ data = draw(st.binary(*args, **kwargs)) if data: unused = draw(st.integers(min_value=0, max_value=7)) data = bytearray(data) data[-1] &= -(2**unused) data = bytes(data) else: unused = 0 return encode_bitstring(data, unused) def st_der_octet_string(*args, **kwargs): """ Hypothesis strategy that returns a random DER OCTET STRING object. Parameters are passed to hypothesis.strategy.binary """ return st.builds(encode_octet_string, st.binary(*args, **kwargs)) def st_der_null(): """ Hypothesis strategy that returns DER NULL object. """ return st.just(b"\x05\x00") @st.composite def st_der_oid(draw): """ Hypothesis strategy that returns DER OBJECT IDENTIFIER objects. """ first = draw(st.integers(min_value=0, max_value=2)) if first < 2: second = draw(st.integers(min_value=0, max_value=39)) else: second = draw(st.integers(min_value=0, max_value=2**512)) rest = draw( st.lists(st.integers(min_value=0, max_value=2**512), max_size=50) ) return encode_oid(first, second, *rest) def st_der(): """ Hypothesis strategy that returns random DER structures. A valid DER structure is any primitive object, an octet encoding of a valid DER structure, sequence of valid DER objects or a constructed encoding of any of the above. 
""" return st.recursive( st.just(b"") | st_der_integer(max_value=2**4096) | st_der_bit_string(max_size=1024**2) | st_der_octet_string(max_size=1024**2) | st_der_null() | st_der_oid(), lambda children: st.builds( lambda x: encode_octet_string(x), st.one_of(children) ) | st.builds(lambda x: encode_bitstring(x, 0), st.one_of(children)) | st.builds( lambda x: encode_sequence(*x), st.lists(children, max_size=200) ) | st.builds( lambda tag, x: encode_constructed(tag, x), st.integers(min_value=0, max_value=0x3F), st.one_of(children), ), max_leaves=40, ) @settings(**params) @given(st.sampled_from(keys_and_sigs), st_der()) def test_random_der_as_signature(params, der): """Check if random DER structures are rejected as signature""" name, verifying_key, _ = params with pytest.raises(BadSignatureError): verifying_key.verify(der, example_data, sigdecode=sigdecode_der) @settings(**params) @given(st.sampled_from(keys_and_sigs), st.binary(max_size=1024**2)) @example( keys_and_sigs[0], encode_sequence(encode_integer(0), encode_integer(0)) ) @example( keys_and_sigs[0], encode_sequence(encode_integer(1), encode_integer(1)) + b"\x00", ) @example(keys_and_sigs[0], encode_sequence(*[encode_integer(1)] * 3)) def test_random_bytes_as_signature(params, der): """Check if random bytes are rejected as signature""" name, verifying_key, _ = params with pytest.raises(BadSignatureError): verifying_key.verify(der, example_data, sigdecode=sigdecode_der) keys_and_string_sigs = [ ( name, verifying_key, sigencode_string( *sigdecode_der(sig, verifying_key.curve.order), order=verifying_key.curve.order ), ) for name, verifying_key, sig in keys_and_sigs if not isinstance(verifying_key.curve.curve, CurveEdTw) ] """ Name of the curve+hash combination, VerifyingKey and signature as a byte string. 
""" keys_and_string_sigs += [ ( name, verifying_key, sig, ) for name, verifying_key, sig in keys_and_sigs if isinstance(verifying_key.curve.curve, CurveEdTw) ] @settings(**params) @given(st_fuzzed_sig(keys_and_string_sigs)) def test_fuzzed_string_signatures(params): verifying_key, sig = params with pytest.raises(BadSignatureError): verifying_key.verify(sig, example_data, sigdecode=sigdecode_string) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1649084541.0 ecdsa-0.18.0/src/ecdsa/test_numbertheory.py0000664005075200507520000002654314222604175020270 0ustar00hkariohkarioimport operator from functools import reduce try: import unittest2 as unittest except ImportError: import unittest import hypothesis.strategies as st import pytest from hypothesis import given, settings, example try: from hypothesis import HealthCheck HC_PRESENT = True except ImportError: # pragma: no cover HC_PRESENT = False from .numbertheory import ( SquareRootError, JacobiError, factorization, gcd, lcm, jacobi, inverse_mod, is_prime, next_prime, smallprimes, square_root_mod_prime, ) BIGPRIMES = ( 999671, 999683, 999721, 999727, 999749, 999763, 999769, 999773, 999809, 999853, 999863, 999883, 999907, 999917, 999931, 999953, 999959, 999961, 999979, 999983, ) @pytest.mark.parametrize( "prime, next_p", [(p, q) for p, q in zip(BIGPRIMES[:-1], BIGPRIMES[1:])] ) def test_next_prime(prime, next_p): assert next_prime(prime) == next_p @pytest.mark.parametrize("val", [-1, 0, 1]) def test_next_prime_with_nums_less_2(val): assert next_prime(val) == 2 @pytest.mark.parametrize("prime", smallprimes) def test_square_root_mod_prime_for_small_primes(prime): squares = set() for num in range(0, 1 + prime // 2): sq = num * num % prime squares.add(sq) root = square_root_mod_prime(sq, prime) # tested for real with TestNumbertheory.test_square_root_mod_prime assert root * root % prime == sq for nonsquare in range(0, prime): if nonsquare in squares: continue with pytest.raises(SquareRootError): square_root_mod_prime(nonsquare, prime) def test_square_root_mod_prime_for_2(): a = square_root_mod_prime(1, 2) assert a == 1 def test_square_root_mod_prime_for_small_prime(): root = square_root_mod_prime(98**2 % 101, 101) assert root * root % 101 == 9 def test_square_root_mod_prime_for_p_congruent_5(): p = 13 assert p % 8 == 5 root = square_root_mod_prime(3, p) assert root * root % p == 3 def test_square_root_mod_prime_for_p_congruent_5_large_d(): p = 29 assert p % 8 == 5 root = square_root_mod_prime(4, p) assert root * root % p == 4 class TestSquareRootModPrime(unittest.TestCase): def test_power_of_2_p(self): with self.assertRaises(JacobiError): square_root_mod_prime(12, 32) def test_no_square(self): with self.assertRaises(SquareRootError) as e: square_root_mod_prime(12, 31) self.assertIn("no square root", str(e.exception)) def test_non_prime(self): with self.assertRaises(SquareRootError) as e: square_root_mod_prime(12, 33) self.assertIn("p is not prime", str(e.exception)) def test_non_prime_with_negative(self): with self.assertRaises(SquareRootError) as e: square_root_mod_prime(697 - 1, 697) self.assertIn("p is not prime", str(e.exception)) @st.composite def st_two_nums_rel_prime(draw): # 521-bit is the biggest curve we operate on, use 1024 for a bit # of breathing space mod = draw(st.integers(min_value=2, max_value=2**1024)) num = draw( st.integers(min_value=1, max_value=mod - 1).filter( lambda x: gcd(x, mod) == 1 ) ) return num, mod @st.composite def st_primes(draw, *args, **kwargs): if "min_value" not in kwargs: # 
pragma: no branch kwargs["min_value"] = 1 prime = draw( st.sampled_from(smallprimes) | st.integers(*args, **kwargs).filter(is_prime) ) return prime @st.composite def st_num_square_prime(draw): prime = draw(st_primes(max_value=2**1024)) num = draw(st.integers(min_value=0, max_value=1 + prime // 2)) sq = num * num % prime return sq, prime @st.composite def st_comp_with_com_fac(draw): """ Strategy that returns lists of numbers, all having a common factor. """ primes = draw( st.lists(st_primes(max_value=2**512), min_size=1, max_size=10) ) # select random prime(s) that will make the common factor of composites com_fac_primes = draw( st.lists(st.sampled_from(primes), min_size=1, max_size=20) ) com_fac = reduce(operator.mul, com_fac_primes, 1) # select at most 20 lists (returned numbers), # each having at most 30 primes (factors) including none (then the number # will be 1) comp_primes = draw( st.integers(min_value=1, max_value=20).flatmap( lambda n: st.lists( st.lists(st.sampled_from(primes), max_size=30), min_size=1, max_size=n, ) ) ) return [reduce(operator.mul, nums, 1) * com_fac for nums in comp_primes] @st.composite def st_comp_no_com_fac(draw): """ Strategy that returns lists of numbers that don't have a common factor. """ primes = draw( st.lists( st_primes(max_value=2**512), min_size=2, max_size=10, unique=True ) ) # first select the primes that will create the uncommon factor # between returned numbers uncom_fac_primes = draw( st.lists( st.sampled_from(primes), min_size=1, max_size=len(primes) - 1, unique=True, ) ) uncom_fac = reduce(operator.mul, uncom_fac_primes, 1) # then build composites from leftover primes leftover_primes = [i for i in primes if i not in uncom_fac_primes] assert leftover_primes assert uncom_fac_primes # select at most 20 lists, each having at most 30 primes # selected from the leftover_primes list number_primes = draw( st.integers(min_value=1, max_value=20).flatmap( lambda n: st.lists( st.lists(st.sampled_from(leftover_primes), max_size=30), min_size=1, max_size=n, ) ) ) numbers = [reduce(operator.mul, nums, 1) for nums in number_primes] insert_at = draw(st.integers(min_value=0, max_value=len(numbers))) numbers.insert(insert_at, uncom_fac) return numbers HYP_SETTINGS = {} if HC_PRESENT: # pragma: no branch HYP_SETTINGS["suppress_health_check"] = [ HealthCheck.filter_too_much, HealthCheck.too_slow, ] # the factorization() sometimes takes a long time to finish HYP_SETTINGS["deadline"] = 5000 HYP_SLOW_SETTINGS = dict(HYP_SETTINGS) HYP_SLOW_SETTINGS["max_examples"] = 10 class TestIsPrime(unittest.TestCase): def test_very_small_prime(self): assert is_prime(23) def test_very_small_composite(self): assert not is_prime(22) def test_small_prime(self): assert is_prime(123456791) def test_special_composite(self): assert not is_prime(10261) def test_medium_prime_1(self): # nextPrime[2^256] assert is_prime(2**256 + 0x129) def test_medium_prime_2(self): # nextPrime(2^256+0x129) assert is_prime(2**256 + 0x12D) def test_medium_trivial_composite(self): assert not is_prime(2**256 + 0x130) def test_medium_non_trivial_composite(self): assert not is_prime(2**256 + 0x12F) def test_large_prime(self): # nextPrime[2^2048] assert is_prime(2**2048 + 0x3D5) class TestNumbertheory(unittest.TestCase): def test_gcd(self): assert gcd(3 * 5 * 7, 3 * 5 * 11, 3 * 5 * 13) == 3 * 5 assert gcd([3 * 5 * 7, 3 * 5 * 11, 3 * 5 * 13]) == 3 * 5 assert gcd(3) == 3 @unittest.skipUnless( HC_PRESENT, "Hypothesis 2.0.0 can't be made tolerant of hard to " "meet requirements (like `is_prime()`), the test " "case 
times-out on it", ) @settings(**HYP_SLOW_SETTINGS) @given(st_comp_with_com_fac()) def test_gcd_with_com_factor(self, numbers): n = gcd(numbers) assert 1 in numbers or n != 1 for i in numbers: assert i % n == 0 @unittest.skipUnless( HC_PRESENT, "Hypothesis 2.0.0 can't be made tolerant of hard to " "meet requirements (like `is_prime()`), the test " "case times-out on it", ) @settings(**HYP_SLOW_SETTINGS) @given(st_comp_no_com_fac()) def test_gcd_with_uncom_factor(self, numbers): n = gcd(numbers) assert n == 1 @given( st.lists( st.integers(min_value=1, max_value=2**8192), min_size=1, max_size=20, ) ) def test_gcd_with_random_numbers(self, numbers): n = gcd(numbers) for i in numbers: # check that at least it's a divider assert i % n == 0 def test_lcm(self): assert lcm(3, 5 * 3, 7 * 3) == 3 * 5 * 7 assert lcm([3, 5 * 3, 7 * 3]) == 3 * 5 * 7 assert lcm(3) == 3 @given( st.lists( st.integers(min_value=1, max_value=2**8192), min_size=1, max_size=20, ) ) def test_lcm_with_random_numbers(self, numbers): n = lcm(numbers) for i in numbers: assert n % i == 0 @unittest.skipUnless( HC_PRESENT, "Hypothesis 2.0.0 can't be made tolerant of hard to " "meet requirements (like `is_prime()`), the test " "case times-out on it", ) @settings(**HYP_SETTINGS) @given(st_num_square_prime()) def test_square_root_mod_prime(self, vals): square, prime = vals calc = square_root_mod_prime(square, prime) assert calc * calc % prime == square @settings(**HYP_SETTINGS) @given(st.integers(min_value=1, max_value=10**12)) @example(265399 * 1526929) @example(373297**2 * 553991) def test_factorization(self, num): factors = factorization(num) mult = 1 for i in factors: mult *= i[0] ** i[1] assert mult == num def test_factorisation_smallprimes(self): exp = 101 * 103 assert 101 in smallprimes assert 103 in smallprimes factors = factorization(exp) mult = 1 for i in factors: mult *= i[0] ** i[1] assert mult == exp def test_factorisation_not_smallprimes(self): exp = 1231 * 1237 assert 1231 not in smallprimes assert 1237 not in smallprimes factors = factorization(exp) mult = 1 for i in factors: mult *= i[0] ** i[1] assert mult == exp def test_jacobi_with_zero(self): assert jacobi(0, 3) == 0 def test_jacobi_with_one(self): assert jacobi(1, 3) == 1 @settings(**HYP_SETTINGS) @given(st.integers(min_value=3, max_value=1000).filter(lambda x: x % 2)) def test_jacobi(self, mod): if is_prime(mod): squares = set() for root in range(1, mod): assert jacobi(root * root, mod) == 1 squares.add(root * root % mod) for i in range(1, mod): if i not in squares: assert jacobi(i, mod) == -1 else: factors = factorization(mod) for a in range(1, mod): c = 1 for i in factors: c *= jacobi(a, i[0]) ** i[1] assert c == jacobi(a, mod) @given(st_two_nums_rel_prime()) def test_inverse_mod(self, nums): num, mod = nums inv = inverse_mod(num, mod) assert 0 < inv < mod assert num * inv % mod == 1 def test_inverse_mod_with_zero(self): assert 0 == inverse_mod(0, 11) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1650905591.0 ecdsa-0.18.0/src/ecdsa/test_pyecdsa.py0000664005075200507520000024150514231550767017201 0ustar00hkariohkariofrom __future__ import with_statement, division try: import unittest2 as unittest except ImportError: import unittest import os import sys import shutil import subprocess import pytest from binascii import hexlify, unhexlify from hashlib import sha1, sha256, sha384, sha512 import hashlib from functools import partial from hypothesis import given import hypothesis.strategies as st from six import b, print_, binary_type 
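# Stand-alone sketch (illustration only) of the ecdsa.numbertheory helpers
# covered by test_numbertheory.py above; the concrete values mirror the
# assertions in that module.
from ecdsa.numbertheory import gcd, lcm, inverse_mod, square_root_mod_prime

assert gcd([3 * 5 * 7, 3 * 5 * 11, 3 * 5 * 13]) == 3 * 5
assert lcm([3, 5 * 3, 7 * 3]) == 3 * 5 * 7
assert 7 * inverse_mod(7, 11) % 11 == 1        # modular inverse
_root = square_root_mod_prime(4, 29)           # exercises the p % 8 == 5 path
assert _root * _root % 29 == 4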
from .keys import SigningKey, VerifyingKey from .keys import BadSignatureError, MalformedPointError, BadDigestError from . import util from .util import sigencode_der, sigencode_strings from .util import sigdecode_der, sigdecode_strings from .util import number_to_string, encoded_oid_ecPublicKey, MalformedSignature from .curves import Curve, UnknownCurveError from .curves import ( SECP112r1, SECP112r2, SECP128r1, SECP160r1, NIST192p, NIST224p, NIST256p, NIST384p, NIST521p, SECP256k1, BRAINPOOLP160r1, BRAINPOOLP192r1, BRAINPOOLP224r1, BRAINPOOLP256r1, BRAINPOOLP320r1, BRAINPOOLP384r1, BRAINPOOLP512r1, Ed25519, Ed448, curves, ) from .ecdsa import ( curve_brainpoolp224r1, curve_brainpoolp256r1, curve_brainpoolp384r1, curve_brainpoolp512r1, ) from .ellipticcurve import Point from . import der from . import rfc6979 from . import ecdsa class SubprocessError(Exception): pass def run_openssl(cmd): OPENSSL = "openssl" p = subprocess.Popen( [OPENSSL] + cmd.split(), stdout=subprocess.PIPE, stderr=subprocess.STDOUT, ) stdout, ignored = p.communicate() if p.returncode != 0: raise SubprocessError( "cmd '%s %s' failed: rc=%s, stdout/err was %s" % (OPENSSL, cmd, p.returncode, stdout) ) return stdout.decode() class ECDSA(unittest.TestCase): def test_basic(self): priv = SigningKey.generate() pub = priv.get_verifying_key() data = b("blahblah") sig = priv.sign(data) self.assertTrue(pub.verify(sig, data)) self.assertRaises(BadSignatureError, pub.verify, sig, data + b("bad")) pub2 = VerifyingKey.from_string(pub.to_string()) self.assertTrue(pub2.verify(sig, data)) def test_deterministic(self): data = b("blahblah") secexp = int("9d0219792467d7d37b4d43298a7d0c05", 16) priv = SigningKey.from_secret_exponent(secexp, SECP256k1, sha256) pub = priv.get_verifying_key() k = rfc6979.generate_k( SECP256k1.generator.order(), secexp, sha256, sha256(data).digest() ) sig1 = priv.sign(data, k=k) self.assertTrue(pub.verify(sig1, data)) sig2 = priv.sign(data, k=k) self.assertTrue(pub.verify(sig2, data)) sig3 = priv.sign_deterministic(data, sha256) self.assertTrue(pub.verify(sig3, data)) self.assertEqual(sig1, sig2) self.assertEqual(sig1, sig3) def test_bad_usage(self): # sk=SigningKey() is wrong self.assertRaises(TypeError, SigningKey) self.assertRaises(TypeError, VerifyingKey) def test_lengths(self): default = NIST192p priv = SigningKey.generate() pub = priv.get_verifying_key() self.assertEqual(len(pub.to_string()), default.verifying_key_length) sig = priv.sign(b("data")) self.assertEqual(len(sig), default.signature_length) for curve in ( NIST192p, NIST224p, NIST256p, NIST384p, NIST521p, BRAINPOOLP160r1, BRAINPOOLP192r1, BRAINPOOLP224r1, BRAINPOOLP256r1, BRAINPOOLP320r1, BRAINPOOLP384r1, BRAINPOOLP512r1, ): priv = SigningKey.generate(curve=curve) pub1 = priv.get_verifying_key() pub2 = VerifyingKey.from_string(pub1.to_string(), curve) self.assertEqual(pub1.to_string(), pub2.to_string()) self.assertEqual(len(pub1.to_string()), curve.verifying_key_length) sig = priv.sign(b("data")) self.assertEqual(len(sig), curve.signature_length) def test_serialize(self): seed = b("secret") curve = NIST192p secexp1 = util.randrange_from_seed__trytryagain(seed, curve.order) secexp2 = util.randrange_from_seed__trytryagain(seed, curve.order) self.assertEqual(secexp1, secexp2) priv1 = SigningKey.from_secret_exponent(secexp1, curve) priv2 = SigningKey.from_secret_exponent(secexp2, curve) self.assertEqual( hexlify(priv1.to_string()), hexlify(priv2.to_string()) ) self.assertEqual(priv1.to_pem(), priv2.to_pem()) pub1 = priv1.get_verifying_key() pub2 = 
priv2.get_verifying_key() data = b("data") sig1 = priv1.sign(data) sig2 = priv2.sign(data) self.assertTrue(pub1.verify(sig1, data)) self.assertTrue(pub2.verify(sig1, data)) self.assertTrue(pub1.verify(sig2, data)) self.assertTrue(pub2.verify(sig2, data)) self.assertEqual(hexlify(pub1.to_string()), hexlify(pub2.to_string())) def test_nonrandom(self): s = b("all the entropy in the entire world, compressed into one line") def not_much_entropy(numbytes): return s[:numbytes] # we control the entropy source, these two keys should be identical: priv1 = SigningKey.generate(entropy=not_much_entropy) priv2 = SigningKey.generate(entropy=not_much_entropy) self.assertEqual( hexlify(priv1.get_verifying_key().to_string()), hexlify(priv2.get_verifying_key().to_string()), ) # likewise, signatures should be identical. Obviously you'd never # want to do this with keys you care about, because the secrecy of # the private key depends upon using different random numbers for # each signature sig1 = priv1.sign(b("data"), entropy=not_much_entropy) sig2 = priv2.sign(b("data"), entropy=not_much_entropy) self.assertEqual(hexlify(sig1), hexlify(sig2)) def assertTruePrivkeysEqual(self, priv1, priv2): self.assertEqual( priv1.privkey.secret_multiplier, priv2.privkey.secret_multiplier ) self.assertEqual( priv1.privkey.public_key.generator, priv2.privkey.public_key.generator, ) def test_privkey_creation(self): s = b("all the entropy in the entire world, compressed into one line") def not_much_entropy(numbytes): return s[:numbytes] priv1 = SigningKey.generate() self.assertEqual(priv1.baselen, NIST192p.baselen) priv1 = SigningKey.generate(curve=NIST224p) self.assertEqual(priv1.baselen, NIST224p.baselen) priv1 = SigningKey.generate(entropy=not_much_entropy) self.assertEqual(priv1.baselen, NIST192p.baselen) priv2 = SigningKey.generate(entropy=not_much_entropy) self.assertEqual(priv2.baselen, NIST192p.baselen) self.assertTruePrivkeysEqual(priv1, priv2) priv1 = SigningKey.from_secret_exponent(secexp=3) self.assertEqual(priv1.baselen, NIST192p.baselen) priv2 = SigningKey.from_secret_exponent(secexp=3) self.assertTruePrivkeysEqual(priv1, priv2) priv1 = SigningKey.from_secret_exponent(secexp=4, curve=NIST224p) self.assertEqual(priv1.baselen, NIST224p.baselen) def test_privkey_strings(self): priv1 = SigningKey.generate() s1 = priv1.to_string() self.assertEqual(type(s1), binary_type) self.assertEqual(len(s1), NIST192p.baselen) priv2 = SigningKey.from_string(s1) self.assertTruePrivkeysEqual(priv1, priv2) s1 = priv1.to_pem() self.assertEqual(type(s1), binary_type) self.assertTrue(s1.startswith(b("-----BEGIN EC PRIVATE KEY-----"))) self.assertTrue(s1.strip().endswith(b("-----END EC PRIVATE KEY-----"))) priv2 = SigningKey.from_pem(s1) self.assertTruePrivkeysEqual(priv1, priv2) s1 = priv1.to_der() self.assertEqual(type(s1), binary_type) priv2 = SigningKey.from_der(s1) self.assertTruePrivkeysEqual(priv1, priv2) priv1 = SigningKey.generate(curve=NIST256p) s1 = priv1.to_pem() self.assertEqual(type(s1), binary_type) self.assertTrue(s1.startswith(b("-----BEGIN EC PRIVATE KEY-----"))) self.assertTrue(s1.strip().endswith(b("-----END EC PRIVATE KEY-----"))) priv2 = SigningKey.from_pem(s1) self.assertTruePrivkeysEqual(priv1, priv2) s1 = priv1.to_der() self.assertEqual(type(s1), binary_type) priv2 = SigningKey.from_der(s1) self.assertTruePrivkeysEqual(priv1, priv2) def test_privkey_strings_brainpool(self): priv1 = SigningKey.generate(curve=BRAINPOOLP512r1) s1 = priv1.to_pem() self.assertEqual(type(s1), binary_type) 
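# Stand-alone sketch (illustration only, not part of the test suite) of the
# deterministic (RFC 6979) signing flow that test_deterministic above
# exercises: the same key and message always produce the same signature,
# which still verifies like any other ECDSA signature.
from hashlib import sha256
from ecdsa.keys import SigningKey
from ecdsa.curves import SECP256k1

_secexp = int("9d0219792467d7d37b4d43298a7d0c05", 16)
_sk_det = SigningKey.from_secret_exponent(_secexp, SECP256k1, sha256)
_vk_det = _sk_det.get_verifying_key()
_sig_a = _sk_det.sign_deterministic(b"blahblah", sha256)
_sig_b = _sk_det.sign_deterministic(b"blahblah", sha256)
assert _sig_a == _sig_b            # no per-signature randomness is used
assert _vk_det.verify(_sig_a, b"blahblah")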
self.assertTrue(s1.startswith(b("-----BEGIN EC PRIVATE KEY-----"))) self.assertTrue(s1.strip().endswith(b("-----END EC PRIVATE KEY-----"))) priv2 = SigningKey.from_pem(s1) self.assertTruePrivkeysEqual(priv1, priv2) s1 = priv1.to_der() self.assertEqual(type(s1), binary_type) priv2 = SigningKey.from_der(s1) self.assertTruePrivkeysEqual(priv1, priv2) def assertTruePubkeysEqual(self, pub1, pub2): self.assertEqual(pub1.pubkey.point, pub2.pubkey.point) self.assertEqual(pub1.pubkey.generator, pub2.pubkey.generator) self.assertEqual(pub1.curve, pub2.curve) def test_pubkey_strings(self): priv1 = SigningKey.generate() pub1 = priv1.get_verifying_key() s1 = pub1.to_string() self.assertEqual(type(s1), binary_type) self.assertEqual(len(s1), NIST192p.verifying_key_length) pub2 = VerifyingKey.from_string(s1) self.assertTruePubkeysEqual(pub1, pub2) priv1 = SigningKey.generate(curve=NIST256p) pub1 = priv1.get_verifying_key() s1 = pub1.to_string() self.assertEqual(type(s1), binary_type) self.assertEqual(len(s1), NIST256p.verifying_key_length) pub2 = VerifyingKey.from_string(s1, curve=NIST256p) self.assertTruePubkeysEqual(pub1, pub2) pub1_der = pub1.to_der() self.assertEqual(type(pub1_der), binary_type) pub2 = VerifyingKey.from_der(pub1_der) self.assertTruePubkeysEqual(pub1, pub2) self.assertRaises( der.UnexpectedDER, VerifyingKey.from_der, pub1_der + b("junk") ) badpub = VerifyingKey.from_der(pub1_der) class FakeGenerator: def order(self): return 123456789 class FakeCurveFp: def p(self): return int( "6525534529039240705020950546962731340" "4541085228058844382513856749047873406763" ) badcurve = Curve( "unknown", FakeCurveFp(), FakeGenerator(), (1, 2, 3, 4, 5, 6), None ) badpub.curve = badcurve badder = badpub.to_der() self.assertRaises(UnknownCurveError, VerifyingKey.from_der, badder) pem = pub1.to_pem() self.assertEqual(type(pem), binary_type) self.assertTrue(pem.startswith(b("-----BEGIN PUBLIC KEY-----")), pem) self.assertTrue( pem.strip().endswith(b("-----END PUBLIC KEY-----")), pem ) pub2 = VerifyingKey.from_pem(pem) self.assertTruePubkeysEqual(pub1, pub2) def test_pubkey_strings_brainpool(self): priv1 = SigningKey.generate(curve=BRAINPOOLP512r1) pub1 = priv1.get_verifying_key() s1 = pub1.to_string() self.assertEqual(type(s1), binary_type) self.assertEqual(len(s1), BRAINPOOLP512r1.verifying_key_length) pub2 = VerifyingKey.from_string(s1, curve=BRAINPOOLP512r1) self.assertTruePubkeysEqual(pub1, pub2) pub1_der = pub1.to_der() self.assertEqual(type(pub1_der), binary_type) pub2 = VerifyingKey.from_der(pub1_der) self.assertTruePubkeysEqual(pub1, pub2) def test_vk_to_der_with_invalid_point_encoding(self): sk = SigningKey.generate() vk = sk.verifying_key with self.assertRaises(ValueError): vk.to_der("raw") def test_sk_to_der_with_invalid_point_encoding(self): sk = SigningKey.generate() with self.assertRaises(ValueError): sk.to_der("raw") def test_vk_from_der_garbage_after_curve_oid(self): type_oid_der = encoded_oid_ecPublicKey curve_oid_der = der.encode_oid(*(1, 2, 840, 10045, 3, 1, 1)) + b( "garbage" ) enc_type_der = der.encode_sequence(type_oid_der, curve_oid_der) point_der = der.encode_bitstring(b"\x00\xff", None) to_decode = der.encode_sequence(enc_type_der, point_der) with self.assertRaises(der.UnexpectedDER): VerifyingKey.from_der(to_decode) def test_vk_from_der_invalid_key_type(self): type_oid_der = der.encode_oid(*(1, 2, 3)) curve_oid_der = der.encode_oid(*(1, 2, 840, 10045, 3, 1, 1)) enc_type_der = der.encode_sequence(type_oid_der, curve_oid_der) point_der = der.encode_bitstring(b"\x00\xff", None) 
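# Stand-alone sketch (illustration only, not part of the test suite) of the
# DER handling test_pubkey_strings above checks: to_der()/from_der()
# round-trips a public key, while trailing garbage after a valid encoding
# is rejected with UnexpectedDER.
from ecdsa.keys import SigningKey, VerifyingKey
from ecdsa.curves import NIST256p
from ecdsa.der import UnexpectedDER

_vk_der = SigningKey.generate(curve=NIST256p).get_verifying_key()
_encoded = _vk_der.to_der()
assert VerifyingKey.from_der(_encoded) == _vk_der
try:
    VerifyingKey.from_der(_encoded + b"junk")
except UnexpectedDER:
    pass  # expected: strict parsing refuses trailing bytes
else:
    raise AssertionError("trailing junk was not rejected")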
to_decode = der.encode_sequence(enc_type_der, point_der) with self.assertRaises(der.UnexpectedDER): VerifyingKey.from_der(to_decode) def test_vk_from_der_garbage_after_point_string(self): type_oid_der = encoded_oid_ecPublicKey curve_oid_der = der.encode_oid(*(1, 2, 840, 10045, 3, 1, 1)) enc_type_der = der.encode_sequence(type_oid_der, curve_oid_der) point_der = der.encode_bitstring(b"\x00\xff", None) + b("garbage") to_decode = der.encode_sequence(enc_type_der, point_der) with self.assertRaises(der.UnexpectedDER): VerifyingKey.from_der(to_decode) def test_vk_from_der_invalid_bitstring(self): type_oid_der = encoded_oid_ecPublicKey curve_oid_der = der.encode_oid(*(1, 2, 840, 10045, 3, 1, 1)) enc_type_der = der.encode_sequence(type_oid_der, curve_oid_der) point_der = der.encode_bitstring(b"\x08\xff", None) to_decode = der.encode_sequence(enc_type_der, point_der) with self.assertRaises(der.UnexpectedDER): VerifyingKey.from_der(to_decode) def test_vk_from_der_with_invalid_length_of_encoding(self): type_oid_der = encoded_oid_ecPublicKey curve_oid_der = der.encode_oid(*(1, 2, 840, 10045, 3, 1, 1)) enc_type_der = der.encode_sequence(type_oid_der, curve_oid_der) point_der = der.encode_bitstring(b"\xff" * 64, 0) to_decode = der.encode_sequence(enc_type_der, point_der) with self.assertRaises(MalformedPointError): VerifyingKey.from_der(to_decode) def test_vk_from_der_with_raw_encoding(self): type_oid_der = encoded_oid_ecPublicKey curve_oid_der = der.encode_oid(*(1, 2, 840, 10045, 3, 1, 1)) enc_type_der = der.encode_sequence(type_oid_der, curve_oid_der) point_der = der.encode_bitstring(b"\xff" * 48, 0) to_decode = der.encode_sequence(enc_type_der, point_der) with self.assertRaises(der.UnexpectedDER): VerifyingKey.from_der(to_decode) def test_signature_strings(self): priv1 = SigningKey.generate() pub1 = priv1.get_verifying_key() data = b("data") sig = priv1.sign(data) self.assertEqual(type(sig), binary_type) self.assertEqual(len(sig), NIST192p.signature_length) self.assertTrue(pub1.verify(sig, data)) sig = priv1.sign(data, sigencode=sigencode_strings) self.assertEqual(type(sig), tuple) self.assertEqual(len(sig), 2) self.assertEqual(type(sig[0]), binary_type) self.assertEqual(type(sig[1]), binary_type) self.assertEqual(len(sig[0]), NIST192p.baselen) self.assertEqual(len(sig[1]), NIST192p.baselen) self.assertTrue(pub1.verify(sig, data, sigdecode=sigdecode_strings)) sig_der = priv1.sign(data, sigencode=sigencode_der) self.assertEqual(type(sig_der), binary_type) self.assertTrue(pub1.verify(sig_der, data, sigdecode=sigdecode_der)) def test_sig_decode_strings_with_invalid_count(self): with self.assertRaises(MalformedSignature): sigdecode_strings([b("one"), b("two"), b("three")], 0xFF) def test_sig_decode_strings_with_wrong_r_len(self): with self.assertRaises(MalformedSignature): sigdecode_strings([b("one"), b("two")], 0xFF) def test_sig_decode_strings_with_wrong_s_len(self): with self.assertRaises(MalformedSignature): sigdecode_strings([b("\xa0"), b("\xb0\xff")], 0xFF) def test_verify_with_too_long_input(self): sk = SigningKey.generate() vk = sk.verifying_key with self.assertRaises(BadDigestError): vk.verify_digest(None, b("\x00") * 128) def test_sk_from_secret_exponent_with_wrong_sec_exponent(self): with self.assertRaises(MalformedPointError): SigningKey.from_secret_exponent(0) def test_sk_from_string_with_wrong_len_string(self): with self.assertRaises(MalformedPointError): SigningKey.from_string(b("\x01")) def test_sk_from_der_with_junk_after_sequence(self): ver_der = der.encode_integer(1) to_decode = 
der.encode_sequence(ver_der) + b("garbage") with self.assertRaises(der.UnexpectedDER): SigningKey.from_der(to_decode) def test_sk_from_der_with_wrong_version(self): ver_der = der.encode_integer(0) to_decode = der.encode_sequence(ver_der) with self.assertRaises(der.UnexpectedDER): SigningKey.from_der(to_decode) def test_sk_from_der_invalid_const_tag(self): ver_der = der.encode_integer(1) privkey_der = der.encode_octet_string(b("\x00\xff")) curve_oid_der = der.encode_oid(*(1, 2, 3)) const_der = der.encode_constructed(1, curve_oid_der) to_decode = der.encode_sequence( ver_der, privkey_der, const_der, curve_oid_der ) with self.assertRaises(der.UnexpectedDER): SigningKey.from_der(to_decode) def test_sk_from_der_garbage_after_privkey_oid(self): ver_der = der.encode_integer(1) privkey_der = der.encode_octet_string(b("\x00\xff")) curve_oid_der = der.encode_oid(*(1, 2, 3)) + b("garbage") const_der = der.encode_constructed(0, curve_oid_der) to_decode = der.encode_sequence( ver_der, privkey_der, const_der, curve_oid_der ) with self.assertRaises(der.UnexpectedDER): SigningKey.from_der(to_decode) def test_sk_from_der_with_short_privkey(self): ver_der = der.encode_integer(1) privkey_der = der.encode_octet_string(b("\x00\xff")) curve_oid_der = der.encode_oid(*(1, 2, 840, 10045, 3, 1, 1)) const_der = der.encode_constructed(0, curve_oid_der) to_decode = der.encode_sequence( ver_der, privkey_der, const_der, curve_oid_der ) sk = SigningKey.from_der(to_decode) self.assertEqual(sk.privkey.secret_multiplier, 255) def test_sk_from_p8_der_with_wrong_version(self): ver_der = der.encode_integer(2) algorithm_der = der.encode_sequence( der.encode_oid(1, 2, 840, 10045, 2, 1), der.encode_oid(1, 2, 840, 10045, 3, 1, 1), ) privkey_der = der.encode_octet_string( der.encode_sequence( der.encode_integer(1), der.encode_octet_string(b"\x00\xff") ) ) to_decode = der.encode_sequence(ver_der, algorithm_der, privkey_der) with self.assertRaises(der.UnexpectedDER): SigningKey.from_der(to_decode) def test_sk_from_p8_der_with_wrong_algorithm(self): ver_der = der.encode_integer(1) algorithm_der = der.encode_sequence( der.encode_oid(1, 2, 3), der.encode_oid(1, 2, 840, 10045, 3, 1, 1) ) privkey_der = der.encode_octet_string( der.encode_sequence( der.encode_integer(1), der.encode_octet_string(b"\x00\xff") ) ) to_decode = der.encode_sequence(ver_der, algorithm_der, privkey_der) with self.assertRaises(der.UnexpectedDER): SigningKey.from_der(to_decode) def test_sk_from_p8_der_with_trailing_junk_after_algorithm(self): ver_der = der.encode_integer(1) algorithm_der = der.encode_sequence( der.encode_oid(1, 2, 840, 10045, 2, 1), der.encode_oid(1, 2, 840, 10045, 3, 1, 1), der.encode_octet_string(b"junk"), ) privkey_der = der.encode_octet_string( der.encode_sequence( der.encode_integer(1), der.encode_octet_string(b"\x00\xff") ) ) to_decode = der.encode_sequence(ver_der, algorithm_der, privkey_der) with self.assertRaises(der.UnexpectedDER): SigningKey.from_der(to_decode) def test_sk_from_p8_der_with_trailing_junk_after_key(self): ver_der = der.encode_integer(1) algorithm_der = der.encode_sequence( der.encode_oid(1, 2, 840, 10045, 2, 1), der.encode_oid(1, 2, 840, 10045, 3, 1, 1), ) privkey_der = der.encode_octet_string( der.encode_sequence( der.encode_integer(1), der.encode_octet_string(b"\x00\xff") ) + der.encode_integer(999) ) to_decode = der.encode_sequence( ver_der, algorithm_der, privkey_der, der.encode_octet_string(b"junk"), ) with self.assertRaises(der.UnexpectedDER): SigningKey.from_der(to_decode) def test_sign_with_too_long_hash(self): sk = 
SigningKey.from_secret_exponent(12) with self.assertRaises(BadDigestError): sk.sign_digest(b("\xff") * 64) def test_hashfunc(self): sk = SigningKey.generate(curve=NIST256p, hashfunc=sha256) data = b("security level is 128 bits") sig = sk.sign(data) vk = VerifyingKey.from_string( sk.get_verifying_key().to_string(), curve=NIST256p, hashfunc=sha256 ) self.assertTrue(vk.verify(sig, data)) sk2 = SigningKey.generate(curve=NIST256p) sig2 = sk2.sign(data, hashfunc=sha256) vk2 = VerifyingKey.from_string( sk2.get_verifying_key().to_string(), curve=NIST256p, hashfunc=sha256, ) self.assertTrue(vk2.verify(sig2, data)) vk3 = VerifyingKey.from_string( sk.get_verifying_key().to_string(), curve=NIST256p ) self.assertTrue(vk3.verify(sig, data, hashfunc=sha256)) def test_public_key_recovery(self): # Create keys curve = BRAINPOOLP160r1 sk = SigningKey.generate(curve=curve) vk = sk.get_verifying_key() # Sign a message data = b("blahblah") signature = sk.sign(data) # Recover verifying keys recovered_vks = VerifyingKey.from_public_key_recovery( signature, data, curve ) # Test if each pk is valid for recovered_vk in recovered_vks: # Test if recovered vk is valid for the data self.assertTrue(recovered_vk.verify(signature, data)) # Test if properties are equal self.assertEqual(vk.curve, recovered_vk.curve) self.assertEqual( vk.default_hashfunc, recovered_vk.default_hashfunc ) # Test if original vk is the list of recovered keys self.assertIn( vk.pubkey.point, [recovered_vk.pubkey.point for recovered_vk in recovered_vks], ) def test_public_key_recovery_with_custom_hash(self): # Create keys curve = BRAINPOOLP160r1 sk = SigningKey.generate(curve=curve, hashfunc=sha256) vk = sk.get_verifying_key() # Sign a message data = b("blahblah") signature = sk.sign(data) # Recover verifying keys recovered_vks = VerifyingKey.from_public_key_recovery( signature, data, curve, hashfunc=sha256, allow_truncate=True ) # Test if each pk is valid for recovered_vk in recovered_vks: # Test if recovered vk is valid for the data self.assertTrue(recovered_vk.verify(signature, data)) # Test if properties are equal self.assertEqual(vk.curve, recovered_vk.curve) self.assertEqual(sha256, recovered_vk.default_hashfunc) # Test if original vk is the list of recovered keys self.assertIn( vk.pubkey.point, [recovered_vk.pubkey.point for recovered_vk in recovered_vks], ) def test_encoding(self): sk = SigningKey.from_secret_exponent(123456789) vk = sk.verifying_key exp = b( "\x0c\xe0\x1d\xe0d\x1c\x8eS\x8a\xc0\x9eK\xa8x !\xd5\xc2\xc3" "\xfd\xc8\xa0c\xff\xfb\x02\xb9\xc4\x84)\x1a\x0f\x8b\x87\xa4" "z\x8a#\xb5\x97\xecO\xb6\xa0HQ\x89*" ) self.assertEqual(vk.to_string(), exp) self.assertEqual(vk.to_string("raw"), exp) self.assertEqual(vk.to_string("uncompressed"), b("\x04") + exp) self.assertEqual(vk.to_string("compressed"), b("\x02") + exp[:24]) self.assertEqual(vk.to_string("hybrid"), b("\x06") + exp) def test_decoding(self): sk = SigningKey.from_secret_exponent(123456789) vk = sk.verifying_key enc = b( "\x0c\xe0\x1d\xe0d\x1c\x8eS\x8a\xc0\x9eK\xa8x !\xd5\xc2\xc3" "\xfd\xc8\xa0c\xff\xfb\x02\xb9\xc4\x84)\x1a\x0f\x8b\x87\xa4" "z\x8a#\xb5\x97\xecO\xb6\xa0HQ\x89*" ) from_raw = VerifyingKey.from_string(enc) self.assertEqual(from_raw.pubkey.point, vk.pubkey.point) from_uncompressed = VerifyingKey.from_string(b("\x04") + enc) self.assertEqual(from_uncompressed.pubkey.point, vk.pubkey.point) from_compressed = VerifyingKey.from_string(b("\x02") + enc[:24]) self.assertEqual(from_compressed.pubkey.point, vk.pubkey.point) from_uncompressed = 
VerifyingKey.from_string(b("\x06") + enc) self.assertEqual(from_uncompressed.pubkey.point, vk.pubkey.point) def test_uncompressed_decoding_as_only_alowed(self): enc = b( "\x04" "\x0c\xe0\x1d\xe0d\x1c\x8eS\x8a\xc0\x9eK\xa8x !\xd5\xc2\xc3" "\xfd\xc8\xa0c\xff\xfb\x02\xb9\xc4\x84)\x1a\x0f\x8b\x87\xa4" "z\x8a#\xb5\x97\xecO\xb6\xa0HQ\x89*" ) vk = VerifyingKey.from_string(enc, valid_encodings=("uncompressed",)) sk = SigningKey.from_secret_exponent(123456789) self.assertEqual(vk, sk.verifying_key) def test_raw_decoding_with_blocked_format(self): enc = b( "\x0c\xe0\x1d\xe0d\x1c\x8eS\x8a\xc0\x9eK\xa8x !\xd5\xc2\xc3" "\xfd\xc8\xa0c\xff\xfb\x02\xb9\xc4\x84)\x1a\x0f\x8b\x87\xa4" "z\x8a#\xb5\x97\xecO\xb6\xa0HQ\x89*" ) with self.assertRaises(MalformedPointError) as exp: VerifyingKey.from_string(enc, valid_encodings=("hybrid",)) self.assertIn("hybrid", str(exp.exception)) def test_decoding_with_unknown_format(self): with self.assertRaises(ValueError) as e: VerifyingKey.from_string(b"", valid_encodings=("raw", "foobar")) self.assertIn("Only uncompressed, compressed", str(e.exception)) def test_uncompressed_decoding_with_blocked_format(self): enc = b( "\x04" "\x0c\xe0\x1d\xe0d\x1c\x8eS\x8a\xc0\x9eK\xa8x !\xd5\xc2\xc3" "\xfd\xc8\xa0c\xff\xfb\x02\xb9\xc4\x84)\x1a\x0f\x8b\x87\xa4" "z\x8a#\xb5\x97\xecO\xb6\xa0HQ\x89*" ) with self.assertRaises(MalformedPointError) as exp: VerifyingKey.from_string(enc, valid_encodings=("hybrid",)) self.assertIn("Invalid X9.62 encoding", str(exp.exception)) def test_hybrid_decoding_with_blocked_format(self): enc = b( "\x06" "\x0c\xe0\x1d\xe0d\x1c\x8eS\x8a\xc0\x9eK\xa8x !\xd5\xc2\xc3" "\xfd\xc8\xa0c\xff\xfb\x02\xb9\xc4\x84)\x1a\x0f\x8b\x87\xa4" "z\x8a#\xb5\x97\xecO\xb6\xa0HQ\x89*" ) with self.assertRaises(MalformedPointError) as exp: VerifyingKey.from_string(enc, valid_encodings=("uncompressed",)) self.assertIn("Invalid X9.62 encoding", str(exp.exception)) def test_compressed_decoding_with_blocked_format(self): enc = b( "\x02" "\x0c\xe0\x1d\xe0d\x1c\x8eS\x8a\xc0\x9eK\xa8x !\xd5\xc2\xc3" "\xfd\xc8\xa0c\xff\xfb\x02\xb9\xc4\x84)\x1a\x0f\x8b\x87\xa4" "z\x8a#\xb5\x97\xecO\xb6\xa0HQ\x89*" )[:25] with self.assertRaises(MalformedPointError) as exp: VerifyingKey.from_string(enc, valid_encodings=("hybrid", "raw")) self.assertIn("(hybrid, raw)", str(exp.exception)) def test_decoding_with_malformed_uncompressed(self): enc = b( "\x0c\xe0\x1d\xe0d\x1c\x8eS\x8a\xc0\x9eK\xa8x !\xd5\xc2\xc3" "\xfd\xc8\xa0c\xff\xfb\x02\xb9\xc4\x84)\x1a\x0f\x8b\x87\xa4" "z\x8a#\xb5\x97\xecO\xb6\xa0HQ\x89*" ) with self.assertRaises(MalformedPointError): VerifyingKey.from_string(b("\x02") + enc) def test_decoding_with_malformed_compressed(self): enc = b( "\x0c\xe0\x1d\xe0d\x1c\x8eS\x8a\xc0\x9eK\xa8x !\xd5\xc2\xc3" "\xfd\xc8\xa0c\xff\xfb\x02\xb9\xc4\x84)\x1a\x0f\x8b\x87\xa4" "z\x8a#\xb5\x97\xecO\xb6\xa0HQ\x89*" ) with self.assertRaises(MalformedPointError): VerifyingKey.from_string(b("\x01") + enc[:24]) def test_decoding_with_inconsistent_hybrid(self): enc = b( "\x0c\xe0\x1d\xe0d\x1c\x8eS\x8a\xc0\x9eK\xa8x !\xd5\xc2\xc3" "\xfd\xc8\xa0c\xff\xfb\x02\xb9\xc4\x84)\x1a\x0f\x8b\x87\xa4" "z\x8a#\xb5\x97\xecO\xb6\xa0HQ\x89*" ) with self.assertRaises(MalformedPointError): VerifyingKey.from_string(b("\x07") + enc) def test_decoding_with_point_not_on_curve(self): enc = b( "\x0c\xe0\x1d\xe0d\x1c\x8eS\x8a\xc0\x9eK\xa8x !\xd5\xc2\xc3" "\xfd\xc8\xa0c\xff\xfb\x02\xb9\xc4\x84)\x1a\x0f\x8b\x87\xa4" "z\x8a#\xb5\x97\xecO\xb6\xa0HQ\x89*" ) with self.assertRaises(MalformedPointError): VerifyingKey.from_string(enc[:47] + b("\x00")) def 
test_decoding_with_point_at_infinity(self): # decoding it is unsupported, as it's not necessary to encode it with self.assertRaises(MalformedPointError): VerifyingKey.from_string(b("\x00")) def test_not_lying_on_curve(self): enc = number_to_string(NIST192p.curve.p(), NIST192p.curve.p() + 1) with self.assertRaises(MalformedPointError): VerifyingKey.from_string(b("\x02") + enc) def test_from_string_with_invalid_curve_too_short_ver_key_len(self): # both verifying_key_length and baselen are calculated internally # by the Curve constructor, but since we depend on them verify # that inconsistent values are detected curve = Curve("test", ecdsa.curve_192, ecdsa.generator_192, (1, 2)) curve.verifying_key_length = 16 curve.baselen = 32 with self.assertRaises(MalformedPointError): VerifyingKey.from_string(b("\x00") * 16, curve) def test_from_string_with_invalid_curve_too_long_ver_key_len(self): # both verifying_key_length and baselen are calculated internally # by the Curve constructor, but since we depend on them verify # that inconsistent values are detected curve = Curve("test", ecdsa.curve_192, ecdsa.generator_192, (1, 2)) curve.verifying_key_length = 16 curve.baselen = 16 with self.assertRaises(MalformedPointError): VerifyingKey.from_string(b("\x00") * 16, curve) @pytest.mark.parametrize( "val,even", [(i, j) for i in range(256) for j in [True, False]] ) def test_VerifyingKey_decode_with_small_values(val, even): enc = number_to_string(val, NIST192p.order) if even: enc = b("\x02") + enc else: enc = b("\x03") + enc # small values can both be actual valid public keys and not, verify that # only expected exceptions are raised if they are not try: vk = VerifyingKey.from_string(enc) assert isinstance(vk, VerifyingKey) except MalformedPointError: assert True params = [] for curve in curves: for enc in ["raw", "uncompressed", "compressed", "hybrid"]: params.append( pytest.param(curve, enc, id="{0}-{1}".format(curve.name, enc)) ) @pytest.mark.parametrize("curve,encoding", params) def test_VerifyingKey_encode_decode(curve, encoding): sk = SigningKey.generate(curve=curve) vk = sk.verifying_key encoded = vk.to_string(encoding) from_enc = VerifyingKey.from_string(encoded, curve=curve) assert vk.pubkey.point == from_enc.pubkey.point class OpenSSL(unittest.TestCase): # test interoperability with OpenSSL tools. Note that openssl's ECDSA # sign/verify arguments changed between 0.9.8 and 1.0.0: the early # versions require "-ecdsa-with-SHA1", the later versions want just # "-SHA1" (or to leave out that argument entirely, which means the # signature will use some default digest algorithm, probably determined # by the key, probably always SHA1). # # openssl ecparam -name secp224r1 -genkey -out privkey.pem # openssl ec -in privkey.pem -text -noout # get the priv/pub keys # openssl dgst -ecdsa-with-SHA1 -sign privkey.pem -out data.sig data.txt # openssl asn1parse -in data.sig -inform DER # data.sig is 64 bytes, probably 56b plus ASN1 overhead # openssl dgst -ecdsa-with-SHA1 -prverify privkey.pem -signature data.sig data.txt ; echo $? # openssl ec -in privkey.pem -pubout -out pubkey.pem # openssl ec -in privkey.pem -pubout -outform DER -out pubkey.der OPENSSL_SUPPORTED_CURVES = set( c.split(":")[0].strip() for c in run_openssl("ecparam -list_curves").split("\n") ) def get_openssl_messagedigest_arg(self, hash_name): v = run_openssl("version") # e.g. 
"OpenSSL 1.0.0 29 Mar 2010", or "OpenSSL 1.0.0a 1 Jun 2010", # or "OpenSSL 0.9.8o 01 Jun 2010" vs = v.split()[1].split(".") if vs >= ["1", "0", "0"]: # pragma: no cover return "-{0}".format(hash_name) else: # pragma: no cover return "-ecdsa-with-{0}".format(hash_name) # sk: 1:OpenSSL->python 2:python->OpenSSL # vk: 3:OpenSSL->python 4:python->OpenSSL # sig: 5:OpenSSL->python 6:python->OpenSSL @pytest.mark.skipif( "secp112r1" not in OPENSSL_SUPPORTED_CURVES, reason="system openssl does not support secp112r1", ) def test_from_openssl_secp112r1(self): return self.do_test_from_openssl(SECP112r1) @pytest.mark.skipif( "secp112r2" not in OPENSSL_SUPPORTED_CURVES, reason="system openssl does not support secp112r2", ) def test_from_openssl_secp112r2(self): return self.do_test_from_openssl(SECP112r2) @pytest.mark.skipif( "secp128r1" not in OPENSSL_SUPPORTED_CURVES, reason="system openssl does not support secp128r1", ) def test_from_openssl_secp128r1(self): return self.do_test_from_openssl(SECP128r1) @pytest.mark.skipif( "secp160r1" not in OPENSSL_SUPPORTED_CURVES, reason="system openssl does not support secp160r1", ) def test_from_openssl_secp160r1(self): return self.do_test_from_openssl(SECP160r1) @pytest.mark.skipif( "prime192v1" not in OPENSSL_SUPPORTED_CURVES, reason="system openssl does not support prime192v1", ) def test_from_openssl_nist192p(self): return self.do_test_from_openssl(NIST192p) @pytest.mark.skipif( "prime192v1" not in OPENSSL_SUPPORTED_CURVES, reason="system openssl does not support prime192v1", ) def test_from_openssl_nist192p_sha256(self): return self.do_test_from_openssl(NIST192p, "SHA256") @pytest.mark.skipif( "secp224r1" not in OPENSSL_SUPPORTED_CURVES, reason="system openssl does not support secp224r1", ) def test_from_openssl_nist224p(self): return self.do_test_from_openssl(NIST224p) @pytest.mark.skipif( "prime256v1" not in OPENSSL_SUPPORTED_CURVES, reason="system openssl does not support prime256v1", ) def test_from_openssl_nist256p(self): return self.do_test_from_openssl(NIST256p) @pytest.mark.skipif( "prime256v1" not in OPENSSL_SUPPORTED_CURVES, reason="system openssl does not support prime256v1", ) def test_from_openssl_nist256p_sha384(self): return self.do_test_from_openssl(NIST256p, "SHA384") @pytest.mark.skipif( "prime256v1" not in OPENSSL_SUPPORTED_CURVES, reason="system openssl does not support prime256v1", ) def test_from_openssl_nist256p_sha512(self): return self.do_test_from_openssl(NIST256p, "SHA512") @pytest.mark.skipif( "secp384r1" not in OPENSSL_SUPPORTED_CURVES, reason="system openssl does not support secp384r1", ) def test_from_openssl_nist384p(self): return self.do_test_from_openssl(NIST384p) @pytest.mark.skipif( "secp521r1" not in OPENSSL_SUPPORTED_CURVES, reason="system openssl does not support secp521r1", ) def test_from_openssl_nist521p(self): return self.do_test_from_openssl(NIST521p) @pytest.mark.skipif( "secp256k1" not in OPENSSL_SUPPORTED_CURVES, reason="system openssl does not support secp256k1", ) def test_from_openssl_secp256k1(self): return self.do_test_from_openssl(SECP256k1) @pytest.mark.skipif( "brainpoolP160r1" not in OPENSSL_SUPPORTED_CURVES, reason="system openssl does not support brainpoolP160r1", ) def test_from_openssl_brainpoolp160r1(self): return self.do_test_from_openssl(BRAINPOOLP160r1) @pytest.mark.skipif( "brainpoolP192r1" not in OPENSSL_SUPPORTED_CURVES, reason="system openssl does not support brainpoolP192r1", ) def test_from_openssl_brainpoolp192r1(self): return self.do_test_from_openssl(BRAINPOOLP192r1) @pytest.mark.skipif( 
"brainpoolP224r1" not in OPENSSL_SUPPORTED_CURVES, reason="system openssl does not support brainpoolP224r1", ) def test_from_openssl_brainpoolp224r1(self): return self.do_test_from_openssl(BRAINPOOLP224r1) @pytest.mark.skipif( "brainpoolP256r1" not in OPENSSL_SUPPORTED_CURVES, reason="system openssl does not support brainpoolP256r1", ) def test_from_openssl_brainpoolp256r1(self): return self.do_test_from_openssl(BRAINPOOLP256r1) @pytest.mark.skipif( "brainpoolP320r1" not in OPENSSL_SUPPORTED_CURVES, reason="system openssl does not support brainpoolP320r1", ) def test_from_openssl_brainpoolp320r1(self): return self.do_test_from_openssl(BRAINPOOLP320r1) @pytest.mark.skipif( "brainpoolP384r1" not in OPENSSL_SUPPORTED_CURVES, reason="system openssl does not support brainpoolP384r1", ) def test_from_openssl_brainpoolp384r1(self): return self.do_test_from_openssl(BRAINPOOLP384r1) @pytest.mark.skipif( "brainpoolP512r1" not in OPENSSL_SUPPORTED_CURVES, reason="system openssl does not support brainpoolP512r1", ) def test_from_openssl_brainpoolp512r1(self): return self.do_test_from_openssl(BRAINPOOLP512r1) def do_test_from_openssl(self, curve, hash_name="SHA1"): curvename = curve.openssl_name assert curvename # OpenSSL: create sk, vk, sign. # Python: read vk(3), checksig(5), read sk(1), sign, check mdarg = self.get_openssl_messagedigest_arg(hash_name) if os.path.isdir("t"): # pragma: no cover shutil.rmtree("t") os.mkdir("t") run_openssl("ecparam -name %s -genkey -out t/privkey.pem" % curvename) run_openssl("ec -in t/privkey.pem -pubout -out t/pubkey.pem") data = b("data") with open("t/data.txt", "wb") as e: e.write(data) run_openssl( "dgst %s -sign t/privkey.pem -out t/data.sig t/data.txt" % mdarg ) run_openssl( "dgst %s -verify t/pubkey.pem -signature t/data.sig t/data.txt" % mdarg ) with open("t/pubkey.pem", "rb") as e: pubkey_pem = e.read() vk = VerifyingKey.from_pem(pubkey_pem) # 3 with open("t/data.sig", "rb") as e: sig_der = e.read() self.assertTrue( vk.verify( sig_der, data, # 5 hashfunc=partial(hashlib.new, hash_name), sigdecode=sigdecode_der, ) ) with open("t/privkey.pem") as e: fp = e.read() sk = SigningKey.from_pem(fp) # 1 sig = sk.sign(data, hashfunc=partial(hashlib.new, hash_name)) self.assertTrue( vk.verify(sig, data, hashfunc=partial(hashlib.new, hash_name)) ) run_openssl( "pkcs8 -topk8 -nocrypt " "-in t/privkey.pem -outform pem -out t/privkey-p8.pem" ) with open("t/privkey-p8.pem", "rb") as e: privkey_p8_pem = e.read() sk_from_p8 = SigningKey.from_pem(privkey_p8_pem) self.assertEqual(sk, sk_from_p8) @pytest.mark.skipif( "secp112r1" not in OPENSSL_SUPPORTED_CURVES, reason="system openssl does not support secp112r1", ) def test_to_openssl_secp112r1(self): self.do_test_to_openssl(SECP112r1) @pytest.mark.skipif( "secp112r2" not in OPENSSL_SUPPORTED_CURVES, reason="system openssl does not support secp112r2", ) def test_to_openssl_secp112r2(self): self.do_test_to_openssl(SECP112r2) @pytest.mark.skipif( "secp128r1" not in OPENSSL_SUPPORTED_CURVES, reason="system openssl does not support secp128r1", ) def test_to_openssl_secp128r1(self): self.do_test_to_openssl(SECP128r1) @pytest.mark.skipif( "secp160r1" not in OPENSSL_SUPPORTED_CURVES, reason="system openssl does not support secp160r1", ) def test_to_openssl_secp160r1(self): self.do_test_to_openssl(SECP160r1) @pytest.mark.skipif( "prime192v1" not in OPENSSL_SUPPORTED_CURVES, reason="system openssl does not support prime192v1", ) def test_to_openssl_nist192p(self): self.do_test_to_openssl(NIST192p) @pytest.mark.skipif( "prime192v1" not in 
OPENSSL_SUPPORTED_CURVES, reason="system openssl does not support prime192v1", ) def test_to_openssl_nist192p_sha256(self): self.do_test_to_openssl(NIST192p, "SHA256") @pytest.mark.skipif( "secp224r1" not in OPENSSL_SUPPORTED_CURVES, reason="system openssl does not support secp224r1", ) def test_to_openssl_nist224p(self): self.do_test_to_openssl(NIST224p) @pytest.mark.skipif( "prime256v1" not in OPENSSL_SUPPORTED_CURVES, reason="system openssl does not support prime256v1", ) def test_to_openssl_nist256p(self): self.do_test_to_openssl(NIST256p) @pytest.mark.skipif( "prime256v1" not in OPENSSL_SUPPORTED_CURVES, reason="system openssl does not support prime256v1", ) def test_to_openssl_nist256p_sha384(self): self.do_test_to_openssl(NIST256p, "SHA384") @pytest.mark.skipif( "prime256v1" not in OPENSSL_SUPPORTED_CURVES, reason="system openssl does not support prime256v1", ) def test_to_openssl_nist256p_sha512(self): self.do_test_to_openssl(NIST256p, "SHA512") @pytest.mark.skipif( "secp384r1" not in OPENSSL_SUPPORTED_CURVES, reason="system openssl does not support secp384r1", ) def test_to_openssl_nist384p(self): self.do_test_to_openssl(NIST384p) @pytest.mark.skipif( "secp521r1" not in OPENSSL_SUPPORTED_CURVES, reason="system openssl does not support secp521r1", ) def test_to_openssl_nist521p(self): self.do_test_to_openssl(NIST521p) @pytest.mark.skipif( "secp256k1" not in OPENSSL_SUPPORTED_CURVES, reason="system openssl does not support secp256k1", ) def test_to_openssl_secp256k1(self): self.do_test_to_openssl(SECP256k1) @pytest.mark.skipif( "brainpoolP160r1" not in OPENSSL_SUPPORTED_CURVES, reason="system openssl does not support brainpoolP160r1", ) def test_to_openssl_brainpoolp160r1(self): self.do_test_to_openssl(BRAINPOOLP160r1) @pytest.mark.skipif( "brainpoolP192r1" not in OPENSSL_SUPPORTED_CURVES, reason="system openssl does not support brainpoolP192r1", ) def test_to_openssl_brainpoolp192r1(self): self.do_test_to_openssl(BRAINPOOLP192r1) @pytest.mark.skipif( "brainpoolP224r1" not in OPENSSL_SUPPORTED_CURVES, reason="system openssl does not support brainpoolP224r1", ) def test_to_openssl_brainpoolp224r1(self): self.do_test_to_openssl(BRAINPOOLP224r1) @pytest.mark.skipif( "brainpoolP256r1" not in OPENSSL_SUPPORTED_CURVES, reason="system openssl does not support brainpoolP256r1", ) def test_to_openssl_brainpoolp256r1(self): self.do_test_to_openssl(BRAINPOOLP256r1) @pytest.mark.skipif( "brainpoolP320r1" not in OPENSSL_SUPPORTED_CURVES, reason="system openssl does not support brainpoolP320r1", ) def test_to_openssl_brainpoolp320r1(self): self.do_test_to_openssl(BRAINPOOLP320r1) @pytest.mark.skipif( "brainpoolP384r1" not in OPENSSL_SUPPORTED_CURVES, reason="system openssl does not support brainpoolP384r1", ) def test_to_openssl_brainpoolp384r1(self): self.do_test_to_openssl(BRAINPOOLP384r1) @pytest.mark.skipif( "brainpoolP512r1" not in OPENSSL_SUPPORTED_CURVES, reason="system openssl does not support brainpoolP512r1", ) def test_to_openssl_brainpoolp512r1(self): self.do_test_to_openssl(BRAINPOOLP512r1) def do_test_to_openssl(self, curve, hash_name="SHA1"): curvename = curve.openssl_name assert curvename # Python: create sk, vk, sign. 
# OpenSSL: read vk(4), checksig(6), read sk(2), sign, check mdarg = self.get_openssl_messagedigest_arg(hash_name) if os.path.isdir("t"): # pragma: no cover shutil.rmtree("t") os.mkdir("t") sk = SigningKey.generate(curve=curve) vk = sk.get_verifying_key() data = b("data") with open("t/pubkey.der", "wb") as e: e.write(vk.to_der()) # 4 with open("t/pubkey.pem", "wb") as e: e.write(vk.to_pem()) # 4 sig_der = sk.sign( data, hashfunc=partial(hashlib.new, hash_name), sigencode=sigencode_der, ) with open("t/data.sig", "wb") as e: e.write(sig_der) # 6 with open("t/data.txt", "wb") as e: e.write(data) with open("t/baddata.txt", "wb") as e: e.write(data + b("corrupt")) self.assertRaises( SubprocessError, run_openssl, "dgst %s -verify t/pubkey.der -keyform DER -signature t/data.sig t/baddata.txt" % mdarg, ) run_openssl( "dgst %s -verify t/pubkey.der -keyform DER -signature t/data.sig t/data.txt" % mdarg ) with open("t/privkey.pem", "wb") as e: e.write(sk.to_pem()) # 2 run_openssl( "dgst %s -sign t/privkey.pem -out t/data.sig2 t/data.txt" % mdarg ) run_openssl( "dgst %s -verify t/pubkey.pem -signature t/data.sig2 t/data.txt" % mdarg ) with open("t/privkey-explicit.pem", "wb") as e: e.write(sk.to_pem(curve_parameters_encoding="explicit")) run_openssl( "dgst %s -sign t/privkey-explicit.pem -out t/data.sig2 t/data.txt" % mdarg ) run_openssl( "dgst %s -verify t/pubkey.pem -signature t/data.sig2 t/data.txt" % mdarg ) with open("t/privkey-p8.pem", "wb") as e: e.write(sk.to_pem(format="pkcs8")) run_openssl( "dgst %s -sign t/privkey-p8.pem -out t/data.sig3 t/data.txt" % mdarg ) run_openssl( "dgst %s -verify t/pubkey.pem -signature t/data.sig3 t/data.txt" % mdarg ) with open("t/privkey-p8-explicit.pem", "wb") as e: e.write( sk.to_pem(format="pkcs8", curve_parameters_encoding="explicit") ) run_openssl( "dgst %s -sign t/privkey-p8-explicit.pem -out t/data.sig3 t/data.txt" % mdarg ) run_openssl( "dgst %s -verify t/pubkey.pem -signature t/data.sig3 t/data.txt" % mdarg ) OPENSSL_SUPPORTED_TYPES = set() try: if "-rawin" in run_openssl("pkeyutl -help"): OPENSSL_SUPPORTED_TYPES = set( c.lower() for c in ("ED25519", "ED448") if c in run_openssl("list -public-key-methods") ) except SubprocessError: pass def do_eddsa_test_to_openssl(self, curve): curvename = curve.name.upper() if os.path.isdir("t"): shutil.rmtree("t") os.mkdir("t") sk = SigningKey.generate(curve=curve) vk = sk.get_verifying_key() data = b"data" with open("t/pubkey.der", "wb") as e: e.write(vk.to_der()) with open("t/pubkey.pem", "wb") as e: e.write(vk.to_pem()) sig = sk.sign(data) with open("t/data.sig", "wb") as e: e.write(sig) with open("t/data.txt", "wb") as e: e.write(data) with open("t/baddata.txt", "wb") as e: e.write(data + b"corrupt") with self.assertRaises(SubprocessError): run_openssl( "pkeyutl -verify -pubin -inkey t/pubkey.pem -rawin " "-in t/baddata.txt -sigfile t/data.sig" ) run_openssl( "pkeyutl -verify -pubin -inkey t/pubkey.pem -rawin " "-in t/data.txt -sigfile t/data.sig" ) shutil.rmtree("t") # in practice at least OpenSSL 3.0.0 is needed to make EdDSA signatures # earlier versions support EdDSA only in X.509 certificates @pytest.mark.skipif( "ed25519" not in OPENSSL_SUPPORTED_TYPES, reason="system openssl does not support signing with Ed25519", ) def test_to_openssl_ed25519(self): return self.do_eddsa_test_to_openssl(Ed25519) @pytest.mark.skipif( "ed448" not in OPENSSL_SUPPORTED_TYPES, reason="system openssl does not support signing with Ed448", ) def test_to_openssl_ed448(self): return self.do_eddsa_test_to_openssl(Ed448) def 
do_eddsa_test_from_openssl(self, curve): curvename = curve.name if os.path.isdir("t"): shutil.rmtree("t") os.mkdir("t") data = b"data" run_openssl( "genpkey -algorithm {0} -outform PEM -out t/privkey.pem".format( curvename ) ) run_openssl( "pkey -outform PEM -pubout -in t/privkey.pem -out t/pubkey.pem" ) with open("t/data.txt", "wb") as e: e.write(data) run_openssl( "pkeyutl -sign -inkey t/privkey.pem " "-rawin -in t/data.txt -out t/data.sig" ) with open("t/data.sig", "rb") as e: sig = e.read() with open("t/pubkey.pem", "rb") as e: vk = VerifyingKey.from_pem(e.read()) self.assertIs(vk.curve, curve) vk.verify(sig, data) shutil.rmtree("t") @pytest.mark.skipif( "ed25519" not in OPENSSL_SUPPORTED_TYPES, reason="system openssl does not support signing with Ed25519", ) def test_from_openssl_ed25519(self): return self.do_eddsa_test_from_openssl(Ed25519) @pytest.mark.skipif( "ed448" not in OPENSSL_SUPPORTED_TYPES, reason="system openssl does not support signing with Ed448", ) def test_from_openssl_ed448(self): return self.do_eddsa_test_from_openssl(Ed448) class TooSmallCurve(unittest.TestCase): OPENSSL_SUPPORTED_CURVES = set( c.split(":")[0].strip() for c in run_openssl("ecparam -list_curves").split("\n") ) @pytest.mark.skipif( "prime192v1" not in OPENSSL_SUPPORTED_CURVES, reason="system openssl does not support prime192v1", ) def test_sign_too_small_curve_dont_allow_truncate_raises(self): sk = SigningKey.generate(curve=NIST192p) data = b("data") with self.assertRaises(BadDigestError): sk.sign( data, hashfunc=partial(hashlib.new, "SHA256"), sigencode=sigencode_der, allow_truncate=False, ) @pytest.mark.skipif( "prime192v1" not in OPENSSL_SUPPORTED_CURVES, reason="system openssl does not support prime192v1", ) def test_verify_too_small_curve_dont_allow_truncate_raises(self): sk = SigningKey.generate(curve=NIST192p) vk = sk.get_verifying_key() data = b("data") sig_der = sk.sign( data, hashfunc=partial(hashlib.new, "SHA256"), sigencode=sigencode_der, allow_truncate=True, ) with self.assertRaises(BadDigestError): vk.verify( sig_der, data, hashfunc=partial(hashlib.new, "SHA256"), sigdecode=sigdecode_der, allow_truncate=False, ) class DER(unittest.TestCase): def test_integer(self): self.assertEqual(der.encode_integer(0), b("\x02\x01\x00")) self.assertEqual(der.encode_integer(1), b("\x02\x01\x01")) self.assertEqual(der.encode_integer(127), b("\x02\x01\x7f")) self.assertEqual(der.encode_integer(128), b("\x02\x02\x00\x80")) self.assertEqual(der.encode_integer(256), b("\x02\x02\x01\x00")) # self.assertEqual(der.encode_integer(-1), b("\x02\x01\xff")) def s(n): return der.remove_integer(der.encode_integer(n) + b("junk")) self.assertEqual(s(0), (0, b("junk"))) self.assertEqual(s(1), (1, b("junk"))) self.assertEqual(s(127), (127, b("junk"))) self.assertEqual(s(128), (128, b("junk"))) self.assertEqual(s(256), (256, b("junk"))) self.assertEqual( s(1234567890123456789012345678901234567890), (1234567890123456789012345678901234567890, b("junk")), ) def test_number(self): self.assertEqual(der.encode_number(0), b("\x00")) self.assertEqual(der.encode_number(127), b("\x7f")) self.assertEqual(der.encode_number(128), b("\x81\x00")) self.assertEqual(der.encode_number(3 * 128 + 7), b("\x83\x07")) # self.assertEqual(der.read_number("\x81\x9b" + "more"), (155, 2)) # self.assertEqual(der.encode_number(155), b("\x81\x9b")) for n in (0, 1, 2, 127, 128, 3 * 128 + 7, 840, 10045): # , 155): x = der.encode_number(n) + b("more") n1, llen = der.read_number(x) self.assertEqual(n1, n) self.assertEqual(x[llen:], b("more")) def 
test_length(self): self.assertEqual(der.encode_length(0), b("\x00")) self.assertEqual(der.encode_length(127), b("\x7f")) self.assertEqual(der.encode_length(128), b("\x81\x80")) self.assertEqual(der.encode_length(255), b("\x81\xff")) self.assertEqual(der.encode_length(256), b("\x82\x01\x00")) self.assertEqual(der.encode_length(3 * 256 + 7), b("\x82\x03\x07")) self.assertEqual(der.read_length(b("\x81\x9b") + b("more")), (155, 2)) self.assertEqual(der.encode_length(155), b("\x81\x9b")) for n in (0, 1, 2, 127, 128, 255, 256, 3 * 256 + 7, 155): x = der.encode_length(n) + b("more") n1, llen = der.read_length(x) self.assertEqual(n1, n) self.assertEqual(x[llen:], b("more")) def test_sequence(self): x = der.encode_sequence(b("ABC"), b("DEF")) + b("GHI") self.assertEqual(x, b("\x30\x06ABCDEFGHI")) x1, rest = der.remove_sequence(x) self.assertEqual(x1, b("ABCDEF")) self.assertEqual(rest, b("GHI")) def test_constructed(self): x = der.encode_constructed(0, NIST224p.encoded_oid) self.assertEqual(hexlify(x), b("a007") + b("06052b81040021")) x = der.encode_constructed(1, unhexlify(b("0102030a0b0c"))) self.assertEqual(hexlify(x), b("a106") + b("0102030a0b0c")) class Util(unittest.TestCase): def test_trytryagain(self): tta = util.randrange_from_seed__trytryagain for i in range(1000): seed = "seed-%d" % i for order in ( 2**8 - 2, 2**8 - 1, 2**8, 2**8 + 1, 2**8 + 2, 2**16 - 1, 2**16 + 1, ): n = tta(seed, order) self.assertTrue(1 <= n < order, (1, n, order)) # this trytryagain *does* provide long-term stability self.assertEqual( ("%x" % (tta("seed", NIST224p.order))).encode(), b("6fa59d73bf0446ae8743cf748fc5ac11d5585a90356417e97155c3bc"), ) def test_trytryagain_single(self): tta = util.randrange_from_seed__trytryagain order = 2**8 - 2 seed = b"text" n = tta(seed, order) # known issue: https://github.com/warner/python-ecdsa/issues/221 if sys.version_info < (3, 0): # pragma: no branch self.assertEqual(n, 228) else: self.assertEqual(n, 18) @given(st.integers(min_value=0, max_value=10**200)) def test_randrange(self, i): # util.randrange does not provide long-term stability: we might # change the algorithm in the future. entropy = util.PRNG("seed-%d" % i) for order in ( 2**8 - 2, 2**8 - 1, 2**8, 2**16 - 1, 2**16 + 1, ): # that oddball 2**16+1 takes half our runtime n = util.randrange(order, entropy=entropy) self.assertTrue(1 <= n < order, (1, n, order)) def OFF_test_prove_uniformity(self): # pragma: no cover order = 2**8 - 2 counts = dict([(i, 0) for i in range(1, order)]) assert 0 not in counts assert order not in counts for i in range(1000000): seed = "seed-%d" % i n = util.randrange_from_seed__trytryagain(seed, order) counts[n] += 1 # this technique should use the full range self.assertTrue(counts[order - 1]) for i in range(1, order): print_("%3d: %s" % (i, "*" * (counts[i] // 100))) class RFC6979(unittest.TestCase): # https://tools.ietf.org/html/rfc6979#appendix-A.1 def _do(self, generator, secexp, hsh, hash_func, expected): actual = rfc6979.generate_k(generator.order(), secexp, hash_func, hsh) self.assertEqual(expected, actual) def test_SECP256k1(self): """RFC doesn't contain test vectors for SECP256k1 used in bitcoin. 
This vector has been computed by Golang reference implementation instead.""" self._do( generator=SECP256k1.generator, secexp=int("9d0219792467d7d37b4d43298a7d0c05", 16), hsh=sha256(b("sample")).digest(), hash_func=sha256, expected=int( "8fa1f95d514760e498f28957b824ee6ec39ed64826ff4fecc2b5739ec45b91cd", 16, ), ) def test_SECP256k1_2(self): self._do( generator=SECP256k1.generator, secexp=int( "cca9fbcc1b41e5a95d369eaa6ddcff73b61a4efaa279cfc6567e8daa39cbaf50", 16, ), hsh=sha256(b("sample")).digest(), hash_func=sha256, expected=int( "2df40ca70e639d89528a6b670d9d48d9165fdc0febc0974056bdce192b8e16a3", 16, ), ) def test_SECP256k1_3(self): self._do( generator=SECP256k1.generator, secexp=0x1, hsh=sha256(b("Satoshi Nakamoto")).digest(), hash_func=sha256, expected=0x8F8A276C19F4149656B280621E358CCE24F5F52542772691EE69063B74F15D15, ) def test_SECP256k1_4(self): self._do( generator=SECP256k1.generator, secexp=0x1, hsh=sha256( b( "All those moments will be lost in time, like tears in rain. Time to die..." ) ).digest(), hash_func=sha256, expected=0x38AA22D72376B4DBC472E06C3BA403EE0A394DA63FC58D88686C611ABA98D6B3, ) def test_SECP256k1_5(self): self._do( generator=SECP256k1.generator, secexp=0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEBAAEDCE6AF48A03BBFD25E8CD0364140, hsh=sha256(b("Satoshi Nakamoto")).digest(), hash_func=sha256, expected=0x33A19B60E25FB6F4435AF53A3D42D493644827367E6453928554F43E49AA6F90, ) def test_SECP256k1_6(self): self._do( generator=SECP256k1.generator, secexp=0xF8B8AF8CE3C7CCA5E300D33939540C10D45CE001B8F252BFBC57BA0342904181, hsh=sha256(b("Alan Turing")).digest(), hash_func=sha256, expected=0x525A82B70E67874398067543FD84C83D30C175FDC45FDEEE082FE13B1D7CFDF1, ) def test_1(self): # Basic example of the RFC, it also tests 'try-try-again' from Step H of rfc6979 self._do( generator=Point( None, 0, 0, int("4000000000000000000020108A2E0CC0D99F8A5EF", 16), ), secexp=int("09A4D6792295A7F730FC3F2B49CBC0F62E862272F", 16), hsh=unhexlify( b( "AF2BDBE1AA9B6EC1E2ADE1D694F41FC71A831D0268E9891562113D8A62ADD1BF" ) ), hash_func=sha256, expected=int("23AF4074C90A02B3FE61D286D5C87F425E6BDD81B", 16), ) def test_2(self): self._do( generator=NIST192p.generator, secexp=int("6FAB034934E4C0FC9AE67F5B5659A9D7D1FEFD187EE09FD4", 16), hsh=sha1(b("sample")).digest(), hash_func=sha1, expected=int( "37D7CA00D2C7B0E5E412AC03BD44BA837FDD5B28CD3B0021", 16 ), ) def test_3(self): self._do( generator=NIST192p.generator, secexp=int("6FAB034934E4C0FC9AE67F5B5659A9D7D1FEFD187EE09FD4", 16), hsh=sha256(b("sample")).digest(), hash_func=sha256, expected=int( "32B1B6D7D42A05CB449065727A84804FB1A3E34D8F261496", 16 ), ) def test_4(self): self._do( generator=NIST192p.generator, secexp=int("6FAB034934E4C0FC9AE67F5B5659A9D7D1FEFD187EE09FD4", 16), hsh=sha512(b("sample")).digest(), hash_func=sha512, expected=int( "A2AC7AB055E4F20692D49209544C203A7D1F2C0BFBC75DB1", 16 ), ) def test_5(self): self._do( generator=NIST192p.generator, secexp=int("6FAB034934E4C0FC9AE67F5B5659A9D7D1FEFD187EE09FD4", 16), hsh=sha1(b("test")).digest(), hash_func=sha1, expected=int( "D9CF9C3D3297D3260773A1DA7418DB5537AB8DD93DE7FA25", 16 ), ) def test_6(self): self._do( generator=NIST192p.generator, secexp=int("6FAB034934E4C0FC9AE67F5B5659A9D7D1FEFD187EE09FD4", 16), hsh=sha256(b("test")).digest(), hash_func=sha256, expected=int( "5C4CE89CF56D9E7C77C8585339B006B97B5F0680B4306C6C", 16 ), ) def test_7(self): self._do( generator=NIST192p.generator, secexp=int("6FAB034934E4C0FC9AE67F5B5659A9D7D1FEFD187EE09FD4", 16), hsh=sha512(b("test")).digest(), hash_func=sha512, expected=int( 
"0758753A5254759C7CFBAD2E2D9B0792EEE44136C9480527", 16 ), ) def test_8(self): self._do( generator=NIST521p.generator, secexp=int( "0FAD06DAA62BA3B25D2FB40133DA757205DE67F5BB0018FEE8C86E1B68C7E75CAA896EB32F1F47C70855836A6D16FCC1466F6D8FBEC67DB89EC0C08B0E996B83538", 16, ), hsh=sha1(b("sample")).digest(), hash_func=sha1, expected=int( "089C071B419E1C2820962321787258469511958E80582E95D8378E0C2CCDB3CB42BEDE42F50E3FA3C71F5A76724281D31D9C89F0F91FC1BE4918DB1C03A5838D0F9", 16, ), ) def test_9(self): self._do( generator=NIST521p.generator, secexp=int( "0FAD06DAA62BA3B25D2FB40133DA757205DE67F5BB0018FEE8C86E1B68C7E75CAA896EB32F1F47C70855836A6D16FCC1466F6D8FBEC67DB89EC0C08B0E996B83538", 16, ), hsh=sha256(b("sample")).digest(), hash_func=sha256, expected=int( "0EDF38AFCAAECAB4383358B34D67C9F2216C8382AAEA44A3DAD5FDC9C32575761793FEF24EB0FC276DFC4F6E3EC476752F043CF01415387470BCBD8678ED2C7E1A0", 16, ), ) def test_10(self): self._do( generator=NIST521p.generator, secexp=int( "0FAD06DAA62BA3B25D2FB40133DA757205DE67F5BB0018FEE8C86E1B68C7E75CAA896EB32F1F47C70855836A6D16FCC1466F6D8FBEC67DB89EC0C08B0E996B83538", 16, ), hsh=sha512(b("test")).digest(), hash_func=sha512, expected=int( "16200813020EC986863BEDFC1B121F605C1215645018AEA1A7B215A564DE9EB1B38A67AA1128B80CE391C4FB71187654AAA3431027BFC7F395766CA988C964DC56D", 16, ), ) class ECDH(unittest.TestCase): def _do(self, curve, generator, dA, x_qA, y_qA, dB, x_qB, y_qB, x_Z, y_Z): qA = dA * generator qB = dB * generator Z = dA * qB self.assertEqual(Point(curve, x_qA, y_qA), qA) self.assertEqual(Point(curve, x_qB, y_qB), qB) self.assertTrue( (dA * qB) == (dA * dB * generator) == (dB * dA * generator) == (dB * qA) ) self.assertEqual(Point(curve, x_Z, y_Z), Z) class RFC6932(ECDH): # https://tools.ietf.org/html/rfc6932#appendix-A.1 def test_brainpoolP224r1(self): self._do( curve=curve_brainpoolp224r1, generator=BRAINPOOLP224r1.generator, dA=int( "7C4B7A2C8A4BAD1FBB7D79CC0955DB7C6A4660CA64CC4778159B495E", 16 ), x_qA=int( "B104A67A6F6E85E14EC1825E1539E8ECDBBF584922367DD88C6BDCF2", 16 ), y_qA=int( "46D782E7FDB5F60CD8404301AC5949C58EDB26BC68BA07695B750A94", 16 ), dB=int( "63976D4AAE6CD0F6DD18DEFEF55D96569D0507C03E74D6486FFA28FB", 16 ), x_qB=int( "2A97089A9296147B71B21A4B574E1278245B536F14D8C2B9D07A874E", 16 ), y_qB=int( "9B900D7C77A709A797276B8CA1BA61BB95B546FC29F862E44D59D25B", 16 ), x_Z=int( "312DFD98783F9FB77B9704945A73BEB6DCCBE3B65D0F967DCAB574EB", 16 ), y_Z=int( "6F800811D64114B1C48C621AB3357CF93F496E4238696A2A012B3C98", 16 ), ) def test_brainpoolP256r1(self): self._do( curve=curve_brainpoolp256r1, generator=BRAINPOOLP256r1.generator, dA=int( "041EB8B1E2BC681BCE8E39963B2E9FC415B05283313DD1A8BCC055F11AE" "49699", 16, ), x_qA=int( "78028496B5ECAAB3C8B6C12E45DB1E02C9E4D26B4113BC4F015F60C5C" "CC0D206", 16, ), y_qA=int( "A2AE1762A3831C1D20F03F8D1E3C0C39AFE6F09B4D44BBE80CD100987" "B05F92B", 16, ), dB=int( "06F5240EACDB9837BC96D48274C8AA834B6C87BA9CC3EEDD81F99A16B8D" "804D3", 16, ), x_qB=int( "8E07E219BA588916C5B06AA30A2F464C2F2ACFC1610A3BE2FB240B635" "341F0DB", 16, ), y_qB=int( "148EA1D7D1E7E54B9555B6C9AC90629C18B63BEE5D7AA6949EBBF47B2" "4FDE40D", 16, ), x_Z=int( "05E940915549E9F6A4A75693716E37466ABA79B4BF2919877A16DD2CC2" "E23708", 16, ), y_Z=int( "6BC23B6702BC5A019438CEEA107DAAD8B94232FFBBC350F3B137628FE6" "FD134C", 16, ), ) def test_brainpoolP384r1(self): self._do( curve=curve_brainpoolp384r1, generator=BRAINPOOLP384r1.generator, dA=int( "014EC0755B78594BA47FB0A56F6173045B4331E74BA1A6F47322E70D79D" "828D97E095884CA72B73FDABD5910DF0FA76A", 16, ), x_qA=int( 
"45CB26E4384DAF6FB776885307B9A38B7AD1B5C692E0C32F012533277" "8F3B8D3F50CA358099B30DEB5EE69A95C058B4E", 16, ), y_qA=int( "8173A1C54AFFA7E781D0E1E1D12C0DC2B74F4DF58E4A4E3AF7026C5D3" "2DC530A2CD89C859BB4B4B768497F49AB8CC859", 16, ), dB=int( "6B461CB79BD0EA519A87D6828815D8CE7CD9B3CAA0B5A8262CBCD550A01" "5C90095B976F3529957506E1224A861711D54", 16, ), x_qB=int( "01BF92A92EE4BE8DED1A911125C209B03F99E3161CFCC986DC7711383" "FC30AF9CE28CA3386D59E2C8D72CE1E7B4666E8", 16, ), y_qB=int( "3289C4A3A4FEE035E39BDB885D509D224A142FF9FBCC5CFE5CCBB3026" "8EE47487ED8044858D31D848F7A95C635A347AC", 16, ), x_Z=int( "04CC4FF3DCCCB07AF24E0ACC529955B36D7C807772B92FCBE48F3AFE9A" "2F370A1F98D3FA73FD0C0747C632E12F1423EC", 16, ), y_Z=int( "7F465F90BD69AFB8F828A214EB9716D66ABC59F17AF7C75EE7F1DE22AB" "5D05085F5A01A9382D05BF72D96698FE3FF64E", 16, ), ) def test_brainpoolP512r1(self): self._do( curve=curve_brainpoolp512r1, generator=BRAINPOOLP512r1.generator, dA=int( "636B6BE0482A6C1C41AA7AE7B245E983392DB94CECEA2660A379CFE1595" "59E357581825391175FC195D28BAC0CF03A7841A383B95C262B98378287" "4CCE6FE333", 16, ), x_qA=int( "0562E68B9AF7CBFD5565C6B16883B777FF11C199161ECC427A39D17EC" "2166499389571D6A994977C56AD8252658BA8A1B72AE42F4FB7532151" "AFC3EF0971CCDA", 16, ), y_qA=int( "A7CA2D8191E21776A89860AFBC1F582FAA308D551C1DC6133AF9F9C3C" "AD59998D70079548140B90B1F311AFB378AA81F51B275B2BE6B7DEE97" "8EFC7343EA642E", 16, ), dB=int( "0AF4E7F6D52EDD52907BB8DBAB3992A0BB696EC10DF11892FF205B66D38" "1ECE72314E6A6EA079CEA06961DBA5AE6422EF2E9EE803A1F236FB96A17" "99B86E5C8B", 16, ), x_qB=int( "5A7954E32663DFF11AE24712D87419F26B708AC2B92877D6BFEE2BFC4" "3714D89BBDB6D24D807BBD3AEB7F0C325F862E8BADE4F74636B97EAAC" "E739E11720D323", 16, ), y_qB=int( "96D14621A9283A1BED84DE8DD64836B2C0758B11441179DC0C54C0D49" "A47C03807D171DD544B72CAAEF7B7CE01C7753E2CAD1A861ECA55A719" "54EE1BA35E04BE", 16, ), x_Z=int( "1EE8321A4BBF93B9CF8921AB209850EC9B7066D1984EF08C2BB7232362" "08AC8F1A483E79461A00E0D5F6921CE9D360502F85C812BEDEE23AC5B2" "10E5811B191E", 16, ), y_Z=int( "2632095B7B936174B41FD2FAF369B1D18DCADEED7E410A7E251F083109" "7C50D02CFED02607B6A2D5ADB4C0006008562208631875B58B54ECDA5A" "4F9FE9EAABA6", 16, ), ) class RFC7027(ECDH): # https://tools.ietf.org/html/rfc7027#appendix-A def test_brainpoolP256r1(self): self._do( curve=curve_brainpoolp256r1, generator=BRAINPOOLP256r1.generator, dA=int( "81DB1EE100150FF2EA338D708271BE38300CB54241D79950F77B0630398" "04F1D", 16, ), x_qA=int( "44106E913F92BC02A1705D9953A8414DB95E1AAA49E81D9E85F929A8E" "3100BE5", 16, ), y_qA=int( "8AB4846F11CACCB73CE49CBDD120F5A900A69FD32C272223F789EF10E" "B089BDC", 16, ), dB=int( "55E40BC41E37E3E2AD25C3C6654511FFA8474A91A0032087593852D3E7D" "76BD3", 16, ), x_qB=int( "8D2D688C6CF93E1160AD04CC4429117DC2C41825E1E9FCA0ADDD34E6F" "1B39F7B", 16, ), y_qB=int( "990C57520812BE512641E47034832106BC7D3E8DD0E4C7F1136D70065" "47CEC6A", 16, ), x_Z=int( "89AFC39D41D3B327814B80940B042590F96556EC91E6AE7939BCE31F3A" "18BF2B", 16, ), y_Z=int( "49C27868F4ECA2179BFD7D59B1E3BF34C1DBDE61AE12931648F43E5963" "2504DE", 16, ), ) def test_brainpoolP384r1(self): self._do( curve=curve_brainpoolp384r1, generator=BRAINPOOLP384r1.generator, dA=int( "1E20F5E048A5886F1F157C74E91BDE2B98C8B52D58E5003D57053FC4B0B" "D65D6F15EB5D1EE1610DF870795143627D042", 16, ), x_qA=int( "68B665DD91C195800650CDD363C625F4E742E8134667B767B1B476793" "588F885AB698C852D4A6E77A252D6380FCAF068", 16, ), y_qA=int( "55BC91A39C9EC01DEE36017B7D673A931236D2F1F5C83942D049E3FA2" "0607493E0D038FF2FD30C2AB67D15C85F7FAA59", 16, ), dB=int( 
"032640BC6003C59260F7250C3DB58CE647F98E1260ACCE4ACDA3DD869F7" "4E01F8BA5E0324309DB6A9831497ABAC96670", 16, ), x_qB=int( "4D44326F269A597A5B58BBA565DA5556ED7FD9A8A9EB76C25F46DB69D" "19DC8CE6AD18E404B15738B2086DF37E71D1EB4", 16, ), y_qB=int( "62D692136DE56CBE93BF5FA3188EF58BC8A3A0EC6C1E151A21038A42E" "9185329B5B275903D192F8D4E1F32FE9CC78C48", 16, ), x_Z=int( "0BD9D3A7EA0B3D519D09D8E48D0785FB744A6B355E6304BC51C229FBBC" "E239BBADF6403715C35D4FB2A5444F575D4F42", 16, ), y_Z=int( "0DF213417EBE4D8E40A5F76F66C56470C489A3478D146DECF6DF0D94BA" "E9E598157290F8756066975F1DB34B2324B7BD", 16, ), ) def test_brainpoolP512r1(self): self._do( curve=curve_brainpoolp512r1, generator=BRAINPOOLP512r1.generator, dA=int( "16302FF0DBBB5A8D733DAB7141C1B45ACBC8715939677F6A56850A38BD8" "7BD59B09E80279609FF333EB9D4C061231FB26F92EEB04982A5F1D1764C" "AD57665422", 16, ), x_qA=int( "0A420517E406AAC0ACDCE90FCD71487718D3B953EFD7FBEC5F7F27E28" "C6149999397E91E029E06457DB2D3E640668B392C2A7E737A7F0BF044" "36D11640FD09FD", 16, ), y_qA=int( "72E6882E8DB28AAD36237CD25D580DB23783961C8DC52DFA2EC138AD4" "72A0FCEF3887CF62B623B2A87DE5C588301EA3E5FC269B373B60724F5" "E82A6AD147FDE7", 16, ), dB=int( "230E18E1BCC88A362FA54E4EA3902009292F7F8033624FD471B5D8ACE49" "D12CFABBC19963DAB8E2F1EBA00BFFB29E4D72D13F2224562F405CB8050" "3666B25429", 16, ), x_qB=int( "9D45F66DE5D67E2E6DB6E93A59CE0BB48106097FF78A081DE781CDB31" "FCE8CCBAAEA8DD4320C4119F1E9CD437A2EAB3731FA9668AB268D871D" "EDA55A5473199F", 16, ), y_qB=int( "2FDC313095BCDD5FB3A91636F07A959C8E86B5636A1E930E8396049CB" "481961D365CC11453A06C719835475B12CB52FC3C383BCE35E27EF194" "512B71876285FA", 16, ), x_Z=int( "A7927098655F1F9976FA50A9D566865DC530331846381C87256BAF3226" "244B76D36403C024D7BBF0AA0803EAFF405D3D24F11A9B5C0BEF679FE1" "454B21C4CD1F", 16, ), y_Z=int( "7DB71C3DEF63212841C463E881BDCF055523BD368240E6C3143BD8DEF8" "B3B3223B95E0F53082FF5E412F4222537A43DF1C6D25729DDB51620A83" "2BE6A26680A2", 16, ), ) # https://tools.ietf.org/html/rfc4754#page-5 @pytest.mark.parametrize( "w, gwx, gwy, k, msg, md, r, s, curve", [ pytest.param( "DC51D3866A15BACDE33D96F992FCA99DA7E6EF0934E7097559C27F1614C88A7F", "2442A5CC0ECD015FA3CA31DC8E2BBC70BF42D60CBCA20085E0822CB04235E970", "6FC98BD7E50211A4A27102FA3549DF79EBCB4BF246B80945CDDFE7D509BBFD7D", "9E56F509196784D963D1C0A401510EE7ADA3DCC5DEE04B154BF61AF1D5A6DECE", b"abc", sha256, "CB28E0999B9C7715FD0A80D8E47A77079716CBBF917DD72E97566EA1C066957C", "86FA3BB4E26CAD5BF90B7F81899256CE7594BB1EA0C89212748BFF3B3D5B0315", NIST256p, id="ECDSA-256", ), pytest.param( "0BEB646634BA87735D77AE4809A0EBEA865535DE4C1E1DCB692E84708E81A5AF" "62E528C38B2A81B35309668D73524D9F", "96281BF8DD5E0525CA049C048D345D3082968D10FEDF5C5ACA0C64E6465A97EA" "5CE10C9DFEC21797415710721F437922", "447688BA94708EB6E2E4D59F6AB6D7EDFF9301D249FE49C33096655F5D502FAD" "3D383B91C5E7EDAA2B714CC99D5743CA", "B4B74E44D71A13D568003D7489908D564C7761E229C58CBFA18950096EB7463B" "854D7FA992F934D927376285E63414FA", b"abc", sha384, "FB017B914E29149432D8BAC29A514640B46F53DDAB2C69948084E2930F1C8F7E" "08E07C9C63F2D21A07DCB56A6AF56EB3", "B263A1305E057F984D38726A1B46874109F417BCA112674C528262A40A629AF1" "CBB9F516CE0FA7D2FF630863A00E8B9F", NIST384p, id="ECDSA-384", ), pytest.param( "0065FDA3409451DCAB0A0EAD45495112A3D813C17BFD34BDF8C1209D7DF58491" "20597779060A7FF9D704ADF78B570FFAD6F062E95C7E0C5D5481C5B153B48B37" "5FA1", "0151518F1AF0F563517EDD5485190DF95A4BF57B5CBA4CF2A9A3F6474725A35F" "7AFE0A6DDEB8BEDBCD6A197E592D40188901CECD650699C9B5E456AEA5ADD190" "52A8", 
"006F3B142EA1BFFF7E2837AD44C9E4FF6D2D34C73184BBAD90026DD5E6E85317" "D9DF45CAD7803C6C20035B2F3FF63AFF4E1BA64D1C077577DA3F4286C58F0AEA" "E643", "00C1C2B305419F5A41344D7E4359933D734096F556197A9B244342B8B62F46F9" "373778F9DE6B6497B1EF825FF24F42F9B4A4BD7382CFC3378A540B1B7F0C1B95" "6C2F", b"abc", sha512, "0154FD3836AF92D0DCA57DD5341D3053988534FDE8318FC6AAAAB68E2E6F4339" "B19F2F281A7E0B22C269D93CF8794A9278880ED7DBB8D9362CAEACEE54432055" "2251", "017705A7030290D1CEB605A9A1BB03FF9CDD521E87A696EC926C8C10C8362DF4" "975367101F67D1CF9BCCBF2F3D239534FA509E70AAC851AE01AAC68D62F86647" "2660", NIST521p, id="ECDSA-521", ), ], ) def test_RFC4754_vectors(w, gwx, gwy, k, msg, md, r, s, curve): sk = SigningKey.from_string(unhexlify(w), curve) vk = VerifyingKey.from_string(unhexlify(gwx + gwy), curve) assert sk.verifying_key == vk sig = sk.sign(msg, hashfunc=md, sigencode=sigencode_strings, k=int(k, 16)) assert sig == (unhexlify(r), unhexlify(s)) assert vk.verify(sig, msg, md, sigdecode_strings) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1611246881.0 ecdsa-0.18.0/src/ecdsa/test_rw_lock.py0000664005075200507520000001555514002326441017177 0ustar00hkariohkario# Copyright Mateusz Kobos, (c) 2011 # https://code.activestate.com/recipes/577803-reader-writer-lock-with-priority-for-writers/ # released under the MIT licence try: import unittest2 as unittest except ImportError: import unittest import threading import time import copy from ._rwlock import RWLock class Writer(threading.Thread): def __init__( self, buffer_, rw_lock, init_sleep_time, sleep_time, to_write ): """ @param buffer_: common buffer_ shared by the readers and writers @type buffer_: list @type rw_lock: L{RWLock} @param init_sleep_time: sleep time before doing any action @type init_sleep_time: C{float} @param sleep_time: sleep time while in critical section @type sleep_time: C{float} @param to_write: data that will be appended to the buffer """ threading.Thread.__init__(self) self.__buffer = buffer_ self.__rw_lock = rw_lock self.__init_sleep_time = init_sleep_time self.__sleep_time = sleep_time self.__to_write = to_write self.entry_time = None """Time of entry to the critical section""" self.exit_time = None """Time of exit from the critical section""" def run(self): time.sleep(self.__init_sleep_time) self.__rw_lock.writer_acquire() self.entry_time = time.time() time.sleep(self.__sleep_time) self.__buffer.append(self.__to_write) self.exit_time = time.time() self.__rw_lock.writer_release() class Reader(threading.Thread): def __init__(self, buffer_, rw_lock, init_sleep_time, sleep_time): """ @param buffer_: common buffer shared by the readers and writers @type buffer_: list @type rw_lock: L{RWLock} @param init_sleep_time: sleep time before doing any action @type init_sleep_time: C{float} @param sleep_time: sleep time while in critical section @type sleep_time: C{float} """ threading.Thread.__init__(self) self.__buffer = buffer_ self.__rw_lock = rw_lock self.__init_sleep_time = init_sleep_time self.__sleep_time = sleep_time self.buffer_read = None """a copy of a the buffer read while in critical section""" self.entry_time = None """Time of entry to the critical section""" self.exit_time = None """Time of exit from the critical section""" def run(self): time.sleep(self.__init_sleep_time) self.__rw_lock.reader_acquire() self.entry_time = time.time() time.sleep(self.__sleep_time) self.buffer_read = copy.deepcopy(self.__buffer) self.exit_time = time.time() self.__rw_lock.reader_release() class 
RWLockTestCase(unittest.TestCase): def test_readers_nonexclusive_access(self): (buffer_, rw_lock, threads) = self.__init_variables() threads.append(Reader(buffer_, rw_lock, 0, 0)) threads.append(Writer(buffer_, rw_lock, 0.2, 0.4, 1)) threads.append(Reader(buffer_, rw_lock, 0.3, 0.3)) threads.append(Reader(buffer_, rw_lock, 0.5, 0)) self.__start_and_join_threads(threads) ## The third reader should enter after the second one but it should ## exit before the second one exits ## (i.e. the readers should be in the critical section ## at the same time) self.assertEqual([], threads[0].buffer_read) self.assertEqual([1], threads[2].buffer_read) self.assertEqual([1], threads[3].buffer_read) self.assertTrue(threads[1].exit_time <= threads[2].entry_time) self.assertTrue(threads[2].entry_time <= threads[3].entry_time) self.assertTrue(threads[3].exit_time < threads[2].exit_time) def test_writers_exclusive_access(self): (buffer_, rw_lock, threads) = self.__init_variables() threads.append(Writer(buffer_, rw_lock, 0, 0.4, 1)) threads.append(Writer(buffer_, rw_lock, 0.1, 0, 2)) threads.append(Reader(buffer_, rw_lock, 0.2, 0)) self.__start_and_join_threads(threads) ## The second writer should wait for the first one to exit self.assertEqual([1, 2], threads[2].buffer_read) self.assertTrue(threads[0].exit_time <= threads[1].entry_time) self.assertTrue(threads[1].exit_time <= threads[2].exit_time) def test_writer_priority(self): (buffer_, rw_lock, threads) = self.__init_variables() threads.append(Writer(buffer_, rw_lock, 0, 0, 1)) threads.append(Reader(buffer_, rw_lock, 0.1, 0.4)) threads.append(Writer(buffer_, rw_lock, 0.2, 0, 2)) threads.append(Reader(buffer_, rw_lock, 0.3, 0)) threads.append(Reader(buffer_, rw_lock, 0.3, 0)) self.__start_and_join_threads(threads) ## The second writer should go before the second and the third reader self.assertEqual([1], threads[1].buffer_read) self.assertEqual([1, 2], threads[3].buffer_read) self.assertEqual([1, 2], threads[4].buffer_read) self.assertTrue(threads[0].exit_time < threads[1].entry_time) self.assertTrue(threads[1].exit_time <= threads[2].entry_time) self.assertTrue(threads[2].exit_time <= threads[3].entry_time) self.assertTrue(threads[2].exit_time <= threads[4].entry_time) def test_many_writers_priority(self): (buffer_, rw_lock, threads) = self.__init_variables() threads.append(Writer(buffer_, rw_lock, 0, 0, 1)) threads.append(Reader(buffer_, rw_lock, 0.1, 0.6)) threads.append(Writer(buffer_, rw_lock, 0.2, 0.1, 2)) threads.append(Reader(buffer_, rw_lock, 0.3, 0)) threads.append(Reader(buffer_, rw_lock, 0.4, 0)) threads.append(Writer(buffer_, rw_lock, 0.5, 0.1, 3)) self.__start_and_join_threads(threads) ## The two last writers should go first -- after the first reader and ## before the second and the third reader self.assertEqual([1], threads[1].buffer_read) self.assertEqual([1, 2, 3], threads[3].buffer_read) self.assertEqual([1, 2, 3], threads[4].buffer_read) self.assertTrue(threads[0].exit_time < threads[1].entry_time) self.assertTrue(threads[1].exit_time <= threads[2].entry_time) self.assertTrue(threads[1].exit_time <= threads[5].entry_time) self.assertTrue(threads[2].exit_time <= threads[3].entry_time) self.assertTrue(threads[2].exit_time <= threads[4].entry_time) self.assertTrue(threads[5].exit_time <= threads[3].entry_time) self.assertTrue(threads[5].exit_time <= threads[4].entry_time) @staticmethod def __init_variables(): buffer_ = [] rw_lock = RWLock() threads = [] return (buffer_, rw_lock, threads) @staticmethod def __start_and_join_threads(threads): for 
t in threads: t.start() for t in threads: t.join() ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1627306025.0 ecdsa-0.18.0/src/ecdsa/test_sha3.py0000664005075200507520000000571614077534051016405 0ustar00hkariohkariotry: import unittest2 as unittest except ImportError: import unittest import pytest try: from gmpy2 import mpz GMPY = True except ImportError: try: from gmpy import mpz GMPY = True except ImportError: GMPY = False from ._sha3 import shake_256 from ._compat import bytes_to_int, int_to_bytes B2I_VECTORS = [ (b"\x00\x01", "big", 1), (b"\x00\x01", "little", 0x0100), (b"", "big", 0), (b"\x00", "little", 0), ] @pytest.mark.parametrize("bytes_in,endian,int_out", B2I_VECTORS) def test_bytes_to_int(bytes_in, endian, int_out): out = bytes_to_int(bytes_in, endian) assert out == int_out class TestBytesToInt(unittest.TestCase): def test_bytes_to_int_wrong_endian(self): with self.assertRaises(ValueError): bytes_to_int(b"\x00", "middle") def test_int_to_bytes_wrong_endian(self): with self.assertRaises(ValueError): int_to_bytes(0, byteorder="middle") @pytest.mark.skipif(GMPY == False, reason="requites gmpy or gmpy2") def test_int_to_bytes_with_gmpy(): assert int_to_bytes(mpz(1)) == b"\x01" I2B_VECTORS = [ (0, None, "big", b""), (0, 1, "big", b"\x00"), (1, None, "big", b"\x01"), (0x0100, None, "little", b"\x00\x01"), (0x0100, 4, "little", b"\x00\x01\x00\x00"), (1, 4, "big", b"\x00\x00\x00\x01"), ] @pytest.mark.parametrize("int_in,length,endian,bytes_out", I2B_VECTORS) def test_int_to_bytes(int_in, length, endian, bytes_out): out = int_to_bytes(int_in, length, endian) assert out == bytes_out SHAKE_256_VECTORS = [ ( b"Message.", 32, b"\x78\xa1\x37\xbb\x33\xae\xe2\x72\xb1\x02\x4f\x39\x43\xe5\xcf\x0c" b"\x4e\x9c\x72\x76\x2e\x34\x4c\xf8\xf9\xc3\x25\x9d\x4f\x91\x2c\x3a", ), ( b"", 32, b"\x46\xb9\xdd\x2b\x0b\xa8\x8d\x13\x23\x3b\x3f\xeb\x74\x3e\xeb\x24" b"\x3f\xcd\x52\xea\x62\xb8\x1b\x82\xb5\x0c\x27\x64\x6e\xd5\x76\x2f", ), ( b"message", 32, b"\x86\x16\xe1\xe4\xcf\xd8\xb5\xf7\xd9\x2d\x43\xd8\x6e\x1b\x14\x51" b"\xa2\xa6\x5a\xf8\x64\xfc\xb1\x26\xc2\x66\x0a\xb3\x46\x51\xb1\x75", ), ( b"message", 16, b"\x86\x16\xe1\xe4\xcf\xd8\xb5\xf7\xd9\x2d\x43\xd8\x6e\x1b\x14\x51", ), ( b"message", 64, b"\x86\x16\xe1\xe4\xcf\xd8\xb5\xf7\xd9\x2d\x43\xd8\x6e\x1b\x14\x51" b"\xa2\xa6\x5a\xf8\x64\xfc\xb1\x26\xc2\x66\x0a\xb3\x46\x51\xb1\x75" b"\x30\xd6\xba\x2a\x46\x65\xf1\x9d\xf0\x62\x25\xb1\x26\xd1\x3e\xed" b"\x91\xd5\x0d\xe7\xb9\xcb\x65\xf3\x3a\x46\xae\xd3\x6c\x7d\xc5\xe8", ), ( b"A" * 1024, 32, b"\xa5\xef\x7e\x30\x8b\xe8\x33\x64\xe5\x9c\xf3\xb5\xf3\xba\x20\xa3" b"\x5a\xe7\x30\xfd\xbc\x33\x11\xbf\x83\x89\x50\x82\xb4\x41\xe9\xb3", ), ] @pytest.mark.parametrize("msg,olen,ohash", SHAKE_256_VECTORS) def test_shake_256(msg, olen, ohash): out = shake_256(msg, olen) assert out == bytearray(ohash) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1648836739.0 ecdsa-0.18.0/src/ecdsa/util.py0000664005075200507520000003443214221640203015446 0ustar00hkariohkariofrom __future__ import division import os import math import binascii import sys from hashlib import sha256 from six import PY2, int2byte, b, next from . 
import der from ._compat import normalise_bytes # RFC5480: # The "unrestricted" algorithm identifier is: # id-ecPublicKey OBJECT IDENTIFIER ::= { # iso(1) member-body(2) us(840) ansi-X9-62(10045) keyType(2) 1 } oid_ecPublicKey = (1, 2, 840, 10045, 2, 1) encoded_oid_ecPublicKey = der.encode_oid(*oid_ecPublicKey) # RFC5480: # The ECDH algorithm uses the following object identifier: # id-ecDH OBJECT IDENTIFIER ::= { # iso(1) identified-organization(3) certicom(132) schemes(1) # ecdh(12) } oid_ecDH = (1, 3, 132, 1, 12) # RFC5480: # The ECMQV algorithm uses the following object identifier: # id-ecMQV OBJECT IDENTIFIER ::= { # iso(1) identified-organization(3) certicom(132) schemes(1) # ecmqv(13) } oid_ecMQV = (1, 3, 132, 1, 13) if sys.version_info >= (3,): # pragma: no branch def entropy_to_bits(ent_256): """Convert a bytestring to string of 0's and 1's""" return bin(int.from_bytes(ent_256, "big"))[2:].zfill(len(ent_256) * 8) else: def entropy_to_bits(ent_256): """Convert a bytestring to string of 0's and 1's""" return "".join(bin(ord(x))[2:].zfill(8) for x in ent_256) if sys.version_info < (2, 7): # pragma: no branch # Can't add a method to a built-in type so we are stuck with this def bit_length(x): return len(bin(x)) - 2 else: def bit_length(x): return x.bit_length() or 1 def orderlen(order): return (1 + len("%x" % order)) // 2 # bytes def randrange(order, entropy=None): """Return a random integer k such that 1 <= k < order, uniformly distributed across that range. Worst case should be a mean of 2 loops at (2**k)+2. Note that this function is not declared to be forwards-compatible: we may change the behavior in future releases. The entropy= argument (which should get a callable that behaves like os.urandom) can be used to achieve stability within a given release (for repeatable unit tests), but should not be used as a long-term-compatible key generation algorithm. """ assert order > 1 if entropy is None: entropy = os.urandom upper_2 = bit_length(order - 2) upper_256 = upper_2 // 8 + 1 while True: # I don't think this needs a counter with bit-wise randrange ent_256 = entropy(upper_256) ent_2 = entropy_to_bits(ent_256) rand_num = int(ent_2[:upper_2], base=2) + 1 if 0 < rand_num < order: return rand_num class PRNG: # this returns a callable which, when invoked with an integer N, will # return N pseudorandom bytes. Note: this is a short-term PRNG, meant # primarily for the needs of randrange_from_seed__trytryagain(), which # only needs to run it a few times per seed. It does not provide # protection against state compromise (forward security). def __init__(self, seed): self.generator = self.block_generator(seed) def __call__(self, numbytes): a = [next(self.generator) for i in range(numbytes)] if PY2: # pragma: no branch return "".join(a) else: return bytes(a) def block_generator(self, seed): counter = 0 while True: for byte in sha256( ("prng-%d-%s" % (counter, seed)).encode() ).digest(): yield byte counter += 1 def randrange_from_seed__overshoot_modulo(seed, order): # hash the data, then turn the digest into a number in [1,order). # # We use David-Sarah Hopwood's suggestion: turn it into a number that's # sufficiently larger than the group order, then modulo it down to fit. # This should give adequate (but not perfect) uniformity, and simple # code. There are other choices: try-try-again is the main one. 
base = PRNG(seed)(2 * orderlen(order)) number = (int(binascii.hexlify(base), 16) % (order - 1)) + 1 assert 1 <= number < order, (1, number, order) return number def lsb_of_ones(numbits): return (1 << numbits) - 1 def bits_and_bytes(order): bits = int(math.log(order - 1, 2) + 1) bytes = bits // 8 extrabits = bits % 8 return bits, bytes, extrabits # the following randrange_from_seed__METHOD() functions take an # arbitrarily-sized secret seed and turn it into a number that obeys the same # range limits as randrange() above. They are meant for deriving consistent # signing keys from a secret rather than generating them randomly, for # example a protocol in which three signing keys are derived from a master # secret. You should use a uniformly-distributed unguessable seed with about # curve.baselen bytes of entropy. To use one, do this: # seed = os.urandom(curve.baselen) # or other starting point # secexp = ecdsa.util.randrange_from_seed__trytryagain(sed, curve.order) # sk = SigningKey.from_secret_exponent(secexp, curve) def randrange_from_seed__truncate_bytes(seed, order, hashmod=sha256): # hash the seed, then turn the digest into a number in [1,order), but # don't worry about trying to uniformly fill the range. This will lose, # on average, four bits of entropy. bits, _bytes, extrabits = bits_and_bytes(order) if extrabits: _bytes += 1 base = hashmod(seed).digest()[:_bytes] base = "\x00" * (_bytes - len(base)) + base number = 1 + int(binascii.hexlify(base), 16) assert 1 <= number < order return number def randrange_from_seed__truncate_bits(seed, order, hashmod=sha256): # like string_to_randrange_truncate_bytes, but only lose an average of # half a bit bits = int(math.log(order - 1, 2) + 1) maxbytes = (bits + 7) // 8 base = hashmod(seed).digest()[:maxbytes] base = "\x00" * (maxbytes - len(base)) + base topbits = 8 * maxbytes - bits if topbits: base = int2byte(ord(base[0]) & lsb_of_ones(topbits)) + base[1:] number = 1 + int(binascii.hexlify(base), 16) assert 1 <= number < order return number def randrange_from_seed__trytryagain(seed, order): # figure out exactly how many bits we need (rounded up to the nearest # bit), so we can reduce the chance of looping to less than 0.5 . This is # specified to feed from a byte-oriented PRNG, and discards the # high-order bits of the first byte as necessary to get the right number # of bits. The average number of loops will range from 1.0 (when # order=2**k-1) to 2.0 (when order=2**k+1). assert order > 1 bits, bytes, extrabits = bits_and_bytes(order) generate = PRNG(seed) while True: extrabyte = b("") if extrabits: extrabyte = int2byte(ord(generate(1)) & lsb_of_ones(extrabits)) guess = string_to_number(extrabyte + generate(bytes)) + 1 if 1 <= guess < order: return guess def number_to_string(num, order): l = orderlen(order) fmt_str = "%0" + str(2 * l) + "x" string = binascii.unhexlify((fmt_str % num).encode()) assert len(string) == l, (len(string), l) return string def number_to_string_crop(num, order): l = orderlen(order) fmt_str = "%0" + str(2 * l) + "x" string = binascii.unhexlify((fmt_str % num).encode()) return string[:l] def string_to_number(string): return int(binascii.hexlify(string), 16) def string_to_number_fixedlen(string, order): l = orderlen(order) assert len(string) == l, (len(string), l) return int(binascii.hexlify(string), 16) # these methods are useful for the sigencode= argument to SK.sign() and the # sigdecode= argument to VK.verify(), and control how the signature is packed # or unpacked. 
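# A minimal usage sketch (illustration only, not part of this module's API;
# `sk`/`vk` are assumed to be a matching ecdsa.SigningKey/VerifyingKey pair).
# The encoders and decoders below are normally passed as callbacks rather
# than called directly:
#
#     sig = sk.sign(b"data", sigencode=sigencode_der)
#     assert vk.verify(sig, b"data", sigdecode=sigdecode_der)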
def sigencode_strings(r, s, order): r_str = number_to_string(r, order) s_str = number_to_string(s, order) return (r_str, s_str) def sigencode_string(r, s, order): """ Encode the signature to raw format (:term:`raw encoding`) It's expected that this function will be used as a `sigencode=` parameter in :func:`ecdsa.keys.SigningKey.sign` method. :param int r: first parameter of the signature :param int s: second parameter of the signature :param int order: the order of the curve over which the signature was computed :return: raw encoding of ECDSA signature :rtype: bytes """ # for any given curve, the size of the signature numbers is # fixed, so just use simple concatenation r_str, s_str = sigencode_strings(r, s, order) return r_str + s_str def sigencode_der(r, s, order): """ Encode the signature into the ECDSA-Sig-Value structure using :term:`DER`. Encodes the signature to the following :term:`ASN.1` structure:: Ecdsa-Sig-Value ::= SEQUENCE { r INTEGER, s INTEGER } It's expected that this function will be used as a `sigencode=` parameter in :func:`ecdsa.keys.SigningKey.sign` method. :param int r: first parameter of the signature :param int s: second parameter of the signature :param int order: the order of the curve over which the signature was computed :return: DER encoding of ECDSA signature :rtype: bytes """ return der.encode_sequence(der.encode_integer(r), der.encode_integer(s)) # canonical versions of sigencode methods # these enforce low S values, by negating the value (modulo the order) if # above order/2 see CECKey::Sign() # https://github.com/bitcoin/bitcoin/blob/master/src/key.cpp#L214 def sigencode_strings_canonize(r, s, order): if s > order / 2: s = order - s return sigencode_strings(r, s, order) def sigencode_string_canonize(r, s, order): if s > order / 2: s = order - s return sigencode_string(r, s, order) def sigencode_der_canonize(r, s, order): if s > order / 2: s = order - s return sigencode_der(r, s, order) class MalformedSignature(Exception): """ Raised by decoding functions when the signature is malformed. Malformed in this context means that the relevant strings or integers do not match what a signature over provided curve would create. Either because the byte strings have incorrect lengths or because the encoded values are too large. """ pass def sigdecode_string(signature, order): """ Decoder for :term:`raw encoding` of ECDSA signatures. raw encoding is a simple concatenation of the two integers that comprise the signature, with each encoded using the same amount of bytes depending on curve size/order. It's expected that this function will be used as the `sigdecode=` parameter to the :func:`ecdsa.keys.VerifyingKey.verify` method. :param signature: encoded signature :type signature: bytes like object :param order: order of the curve over which the signature was computed :type order: int :raises MalformedSignature: when the encoding of the signature is invalid :return: tuple with decoded 'r' and 's' values of signature :rtype: tuple of ints """ signature = normalise_bytes(signature) l = orderlen(order) if not len(signature) == 2 * l: raise MalformedSignature( "Invalid length of signature, expected {0} bytes long, " "provided string is {1} bytes long".format(2 * l, len(signature)) ) r = string_to_number_fixedlen(signature[:l], order) s = string_to_number_fixedlen(signature[l:], order) return r, s def sigdecode_strings(rs_strings, order): """ Decode the signature from two strings. 
First string needs to be a big endian encoding of 'r', second needs to be a big endian encoding of the 's' parameter of an ECDSA signature. It's expected that this function will be used as the `sigdecode=` parameter to the :func:`ecdsa.keys.VerifyingKey.verify` method. :param list rs_strings: list of two bytes-like objects, each encoding one parameter of signature :param int order: order of the curve over which the signature was computed :raises MalformedSignature: when the encoding of the signature is invalid :return: tuple with decoded 'r' and 's' values of signature :rtype: tuple of ints """ if not len(rs_strings) == 2: raise MalformedSignature( "Invalid number of strings provided: {0}, expected 2".format( len(rs_strings) ) ) (r_str, s_str) = rs_strings r_str = normalise_bytes(r_str) s_str = normalise_bytes(s_str) l = orderlen(order) if not len(r_str) == l: raise MalformedSignature( "Invalid length of first string ('r' parameter), " "expected {0} bytes long, provided string is {1} " "bytes long".format(l, len(r_str)) ) if not len(s_str) == l: raise MalformedSignature( "Invalid length of second string ('s' parameter), " "expected {0} bytes long, provided string is {1} " "bytes long".format(l, len(s_str)) ) r = string_to_number_fixedlen(r_str, order) s = string_to_number_fixedlen(s_str, order) return r, s def sigdecode_der(sig_der, order): """ Decoder for DER format of ECDSA signatures. DER format of signature is one that uses the :term:`ASN.1` :term:`DER` rules to encode it as a sequence of two integers:: Ecdsa-Sig-Value ::= SEQUENCE { r INTEGER, s INTEGER } It's expected that this function will be used as as the `sigdecode=` parameter to the :func:`ecdsa.keys.VerifyingKey.verify` method. :param sig_der: encoded signature :type sig_der: bytes like object :param order: order of the curve over which the signature was computed :type order: int :raises UnexpectedDER: when the encoding of signature is invalid :return: tuple with decoded 'r' and 's' values of signature :rtype: tuple of ints """ sig_der = normalise_bytes(sig_der) # return der.encode_sequence(der.encode_integer(r), der.encode_integer(s)) rs_strings, empty = der.remove_sequence(sig_der) if empty != b"": raise der.UnexpectedDER( "trailing junk after DER sig: %s" % binascii.hexlify(empty) ) r, rest = der.remove_integer(rs_strings) s, empty = der.remove_integer(rest) if empty != b"": raise der.UnexpectedDER( "trailing junk after DER numbers: %s" % binascii.hexlify(empty) ) return r, s ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1657371717.7414873 ecdsa-0.18.0/src/ecdsa.egg-info/0000775005075200507520000000000014262276106015617 5ustar00hkariohkario././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1657371717.0 ecdsa-0.18.0/src/ecdsa.egg-info/PKG-INFO0000664005075200507520000010373614262276105016725 0ustar00hkariohkarioMetadata-Version: 2.1 Name: ecdsa Version: 0.18.0 Summary: ECDSA cryptographic signature library (pure python) Home-page: http://github.com/tlsfuzzer/python-ecdsa Author: Brian Warner Author-email: warner@lothar.com License: MIT Description: # Pure-Python ECDSA and ECDH [![Build Status](https://github.com/tlsfuzzer/python-ecdsa/workflows/GitHub%20CI/badge.svg?branch=master)](https://github.com/tlsfuzzer/python-ecdsa/actions?query=workflow%3A%22GitHub+CI%22+branch%3Amaster) [![Documentation Status](https://readthedocs.org/projects/ecdsa/badge/?version=latest)](https://ecdsa.readthedocs.io/en/latest/?badge=latest) [![Coverage 
Status](https://coveralls.io/repos/github/tlsfuzzer/python-ecdsa/badge.svg?branch=master)](https://coveralls.io/github/tlsfuzzer/python-ecdsa?branch=master) ![condition coverage](https://img.shields.io/endpoint?url=https://gist.githubusercontent.com/tomato42/9b6ca1f3410207fbeca785a178781651/raw/python-ecdsa-condition-coverage.json) [![Language grade: Python](https://img.shields.io/lgtm/grade/python/g/tlsfuzzer/python-ecdsa.svg?logo=lgtm&logoWidth=18)](https://lgtm.com/projects/g/tlsfuzzer/python-ecdsa/context:python) [![Total alerts](https://img.shields.io/lgtm/alerts/g/tlsfuzzer/python-ecdsa.svg?logo=lgtm&logoWidth=18)](https://lgtm.com/projects/g/tlsfuzzer/python-ecdsa/alerts/) [![Latest Version](https://img.shields.io/pypi/v/ecdsa.svg?style=flat)](https://pypi.python.org/pypi/ecdsa/) ![Code style: black](https://img.shields.io/badge/code%20style-black-000000.svg?style=flat) This is an easy-to-use implementation of ECC (Elliptic Curve Cryptography) with support for ECDSA (Elliptic Curve Digital Signature Algorithm), EdDSA (Edwards-curve Digital Signature Algorithm) and ECDH (Elliptic Curve Diffie-Hellman), implemented purely in Python, released under the MIT license. With this library, you can quickly create key pairs (signing key and verifying key), sign messages, and verify the signatures. You can also agree on a shared secret key based on exchanged public keys. The keys and signatures are very short, making them easy to handle and incorporate into other protocols. **NOTE: This library should not be used in production settings, see [Security](#Security) for more details.** ## Features This library provides key generation, signing, verifying, and shared secret derivation for five popular NIST "Suite B" GF(p) (_prime field_) curves, with key lengths of 192, 224, 256, 384, and 521 bits. The "short names" for these curves, as known by the OpenSSL tool (`openssl ecparam -list_curves`), are: `prime192v1`, `secp224r1`, `prime256v1`, `secp384r1`, and `secp521r1`. It includes the 256-bit curve `secp256k1` used by Bitcoin. There is also support for the regular (non-twisted) variants of Brainpool curves from 160 to 512 bits. The "short names" of those curves are: `brainpoolP160r1`, `brainpoolP192r1`, `brainpoolP224r1`, `brainpoolP256r1`, `brainpoolP320r1`, `brainpoolP384r1`, `brainpoolP512r1`. Few of the small curves from SEC standard are also included (mainly to speed-up testing of the library), those are: `secp112r1`, `secp112r2`, `secp128r1`, and `secp160r1`. Key generation, siging and verifying is also supported for Ed25519 and Ed448 curves. No other curves are included, but it is not too hard to add support for more curves over prime fields. ## Dependencies This library uses only Python and the 'six' package. It is compatible with Python 2.6, 2.7, and 3.3+. It also supports execution on alternative implementations like pypy and pypy3. If `gmpy2` or `gmpy` is installed, they will be used for faster arithmetic. Either of them can be installed after this library is installed, `python-ecdsa` will detect their presence on start-up and use them automatically. You should prefer `gmpy2` on Python3 for optimal performance. To run the OpenSSL compatibility tests, the 'openssl' tool must be in your `PATH`. This release has been tested successfully against OpenSSL 0.9.8o, 1.0.0a, 1.0.2f, 1.1.1d and 3.0.1 (among others). 
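The Features section above lists the supported curves by name. As a quick way to see the curve objects a given installation actually exposes, the following sketch iterates over the module-level list in `ecdsa.curves` (the `curves` list and the `name` attribute are assumptions about the library internals, not a documented API):

```python
from ecdsa.curves import curves

# print the short name of every curve object shipped with this installation
for curve in curves:
    print(curve.name)
```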
## Installation This library is available on PyPI, it's recommended to install it using `pip`: ``` pip install ecdsa ``` In case higher performance is wanted and using native code is not a problem, it's possible to specify installation together with `gmpy2`: ``` pip install ecdsa[gmpy2] ``` or (slower, legacy option): ``` pip install ecdsa[gmpy] ``` ## Speed The following table shows how long this library takes to generate key pairs (`keygen`), to sign data (`sign`), to verify those signatures (`verify`), to derive a shared secret (`ecdh`), and to verify the signatures with no key-specific precomputation (`no PC verify`). All those values are in seconds. For convenience, the inverses of those values are also provided: how many keys per second can be generated (`keygen/s`), how many signatures can be made per second (`sign/s`), how many signatures can be verified per second (`verify/s`), how many shared secrets can be derived per second (`ecdh/s`), and how many signatures with no key specific precomputation can be verified per second (`no PC verify/s`). The size of raw signature (generally the smallest the way a signature can be encoded) is also provided in the `siglen` column. Use `tox -e speed` to generate this table on your own computer. On an Intel Core i7 4790K @ 4.0GHz I'm getting the following performance: ``` siglen keygen keygen/s sign sign/s verify verify/s no PC verify no PC verify/s NIST192p: 48 0.00032s 3134.06 0.00033s 2985.53 0.00063s 1598.36 0.00129s 774.43 NIST224p: 56 0.00040s 2469.24 0.00042s 2367.88 0.00081s 1233.41 0.00170s 586.66 NIST256p: 64 0.00051s 1952.73 0.00054s 1867.80 0.00098s 1021.86 0.00212s 471.27 NIST384p: 96 0.00107s 935.92 0.00111s 904.23 0.00203s 491.77 0.00446s 224.00 NIST521p: 132 0.00210s 475.52 0.00215s 464.16 0.00398s 251.28 0.00874s 114.39 SECP256k1: 64 0.00052s 1921.54 0.00054s 1847.49 0.00105s 948.68 0.00210s 477.01 BRAINPOOLP160r1: 40 0.00025s 4003.88 0.00026s 3845.12 0.00053s 1893.93 0.00105s 949.92 BRAINPOOLP192r1: 48 0.00033s 3043.97 0.00034s 2975.98 0.00063s 1581.50 0.00135s 742.29 BRAINPOOLP224r1: 56 0.00041s 2436.44 0.00043s 2315.51 0.00078s 1278.49 0.00180s 556.16 BRAINPOOLP256r1: 64 0.00053s 1892.49 0.00054s 1846.24 0.00114s 875.64 0.00229s 437.25 BRAINPOOLP320r1: 80 0.00073s 1361.26 0.00076s 1309.25 0.00143s 699.29 0.00322s 310.49 BRAINPOOLP384r1: 96 0.00107s 931.29 0.00111s 901.80 0.00230s 434.19 0.00476s 210.20 BRAINPOOLP512r1: 128 0.00207s 483.41 0.00212s 471.42 0.00425s 235.43 0.00912s 109.61 SECP112r1: 28 0.00015s 6672.53 0.00016s 6440.34 0.00031s 3265.41 0.00056s 1774.20 SECP112r2: 28 0.00015s 6697.11 0.00015s 6479.98 0.00028s 3524.72 0.00058s 1716.16 SECP128r1: 32 0.00018s 5497.65 0.00019s 5272.89 0.00036s 2747.39 0.00072s 1396.16 SECP160r1: 42 0.00025s 3949.32 0.00026s 3894.45 0.00046s 2153.85 0.00102s 985.07 Ed25519: 64 0.00076s 1324.48 0.00042s 2405.01 0.00109s 918.05 0.00344s 290.50 Ed448: 114 0.00176s 569.53 0.00115s 870.94 0.00282s 355.04 0.01024s 97.69 ecdh ecdh/s NIST192p: 0.00104s 964.89 NIST224p: 0.00134s 748.63 NIST256p: 0.00170s 587.08 NIST384p: 0.00352s 283.90 NIST521p: 0.00717s 139.51 SECP256k1: 0.00154s 648.40 BRAINPOOLP160r1: 0.00082s 1220.70 BRAINPOOLP192r1: 0.00105s 956.75 BRAINPOOLP224r1: 0.00136s 734.52 BRAINPOOLP256r1: 0.00178s 563.32 BRAINPOOLP320r1: 0.00252s 397.23 BRAINPOOLP384r1: 0.00376s 266.27 BRAINPOOLP512r1: 0.00733s 136.35 SECP112r1: 0.00046s 2180.40 SECP112r2: 0.00045s 2229.14 SECP128r1: 0.00054s 1868.15 SECP160r1: 0.00080s 1243.98 ``` To test performance with `gmpy2` loaded, use `tox -e 
speedgmpy2`. On the same machine I'm getting the following performance with `gmpy2`: ``` siglen keygen keygen/s sign sign/s verify verify/s no PC verify no PC verify/s NIST192p: 48 0.00017s 5933.40 0.00017s 5751.70 0.00032s 3125.28 0.00067s 1502.41 NIST224p: 56 0.00021s 4782.87 0.00022s 4610.05 0.00040s 2487.04 0.00089s 1126.90 NIST256p: 64 0.00023s 4263.98 0.00024s 4125.16 0.00045s 2200.88 0.00098s 1016.82 NIST384p: 96 0.00041s 2449.54 0.00042s 2399.96 0.00083s 1210.57 0.00172s 581.43 NIST521p: 132 0.00071s 1416.07 0.00072s 1389.81 0.00144s 692.93 0.00312s 320.40 SECP256k1: 64 0.00024s 4245.05 0.00024s 4122.09 0.00045s 2206.40 0.00094s 1068.32 BRAINPOOLP160r1: 40 0.00014s 6939.17 0.00015s 6681.55 0.00029s 3452.43 0.00057s 1769.81 BRAINPOOLP192r1: 48 0.00017s 5920.05 0.00017s 5774.36 0.00034s 2979.00 0.00069s 1453.19 BRAINPOOLP224r1: 56 0.00021s 4732.12 0.00022s 4622.65 0.00041s 2422.47 0.00087s 1149.87 BRAINPOOLP256r1: 64 0.00024s 4233.02 0.00024s 4115.20 0.00047s 2143.27 0.00098s 1015.60 BRAINPOOLP320r1: 80 0.00032s 3162.38 0.00032s 3077.62 0.00063s 1598.83 0.00136s 737.34 BRAINPOOLP384r1: 96 0.00041s 2436.88 0.00042s 2395.62 0.00083s 1202.68 0.00178s 562.85 BRAINPOOLP512r1: 128 0.00063s 1587.60 0.00064s 1558.83 0.00125s 799.96 0.00281s 355.83 SECP112r1: 28 0.00009s 11118.66 0.00009s 10775.48 0.00018s 5456.00 0.00033s 3020.83 SECP112r2: 28 0.00009s 11322.97 0.00009s 10857.71 0.00017s 5748.77 0.00032s 3094.28 SECP128r1: 32 0.00010s 10078.39 0.00010s 9665.27 0.00019s 5200.58 0.00036s 2760.88 SECP160r1: 42 0.00015s 6875.51 0.00015s 6647.35 0.00029s 3422.41 0.00057s 1768.35 Ed25519: 64 0.00030s 3322.56 0.00018s 5568.63 0.00046s 2165.35 0.00153s 654.02 Ed448: 114 0.00060s 1680.53 0.00039s 2567.40 0.00096s 1036.67 0.00350s 285.62 ecdh ecdh/s NIST192p: 0.00050s 1985.70 NIST224p: 0.00066s 1524.16 NIST256p: 0.00071s 1413.07 NIST384p: 0.00127s 788.89 NIST521p: 0.00230s 434.85 SECP256k1: 0.00071s 1409.95 BRAINPOOLP160r1: 0.00042s 2374.65 BRAINPOOLP192r1: 0.00051s 1960.01 BRAINPOOLP224r1: 0.00066s 1518.37 BRAINPOOLP256r1: 0.00071s 1399.90 BRAINPOOLP320r1: 0.00100s 997.21 BRAINPOOLP384r1: 0.00129s 777.51 BRAINPOOLP512r1: 0.00210s 475.99 SECP112r1: 0.00022s 4457.70 SECP112r2: 0.00024s 4252.33 SECP128r1: 0.00028s 3589.31 SECP160r1: 0.00043s 2305.02 ``` (there's also `gmpy` version, execute it using `tox -e speedgmpy`) For comparison, a highly optimised implementation (including curve-specific assembly for some curves), like the one in OpenSSL 1.1.1d, provides the following performance numbers on the same machine. 
Run `openssl speed ecdsa` and `openssl speed ecdh` to reproduce it: ``` sign verify sign/s verify/s 192 bits ecdsa (nistp192) 0.0002s 0.0002s 4785.6 5380.7 224 bits ecdsa (nistp224) 0.0000s 0.0001s 22475.6 9822.0 256 bits ecdsa (nistp256) 0.0000s 0.0001s 45069.6 14166.6 384 bits ecdsa (nistp384) 0.0008s 0.0006s 1265.6 1648.1 521 bits ecdsa (nistp521) 0.0003s 0.0005s 3753.1 1819.5 256 bits ecdsa (brainpoolP256r1) 0.0003s 0.0003s 2983.5 3333.2 384 bits ecdsa (brainpoolP384r1) 0.0008s 0.0007s 1258.8 1528.1 512 bits ecdsa (brainpoolP512r1) 0.0015s 0.0012s 675.1 860.1 sign verify sign/s verify/s 253 bits EdDSA (Ed25519) 0.0000s 0.0001s 28217.9 10897.7 456 bits EdDSA (Ed448) 0.0003s 0.0005s 3926.5 2147.7 op op/s 192 bits ecdh (nistp192) 0.0002s 4853.4 224 bits ecdh (nistp224) 0.0001s 15252.1 256 bits ecdh (nistp256) 0.0001s 18436.3 384 bits ecdh (nistp384) 0.0008s 1292.7 521 bits ecdh (nistp521) 0.0003s 2884.7 256 bits ecdh (brainpoolP256r1) 0.0003s 3066.5 384 bits ecdh (brainpoolP384r1) 0.0008s 1298.0 512 bits ecdh (brainpoolP512r1) 0.0014s 694.8 ``` Keys and signature can be serialized in different ways (see Usage, below). For a NIST192p key, the three basic representations require strings of the following lengths (in bytes): to_string: signkey= 24, verifykey= 48, signature=48 compressed: signkey=n/a, verifykey= 25, signature=n/a DER: signkey=106, verifykey= 80, signature=55 PEM: signkey=278, verifykey=162, (no support for PEM signatures) ## History In 2006, Peter Pearson announced his pure-python implementation of ECDSA in a [message to sci.crypt][1], available from his [download site][2]. In 2010, Brian Warner wrote a wrapper around this code, to make it a bit easier and safer to use. In 2020, Hubert Kario included an implementation of elliptic curve cryptography that uses Jacobian coordinates internally, improving performance about 20-fold. You are looking at the README for this wrapper. [1]: http://www.derkeiler.com/Newsgroups/sci.crypt/2006-01/msg00651.html [2]: http://webpages.charter.net/curryfans/peter/downloads.html ## Testing To run the full test suite, do this: tox -e coverage On an Intel Core i7 4790K @ 4.0GHz, the tests take about 18 seconds to execute. The test suite uses [`hypothesis`](https://github.com/HypothesisWorks/hypothesis) so there is some inherent variability in the test suite execution time. One part of `test_pyecdsa.py` and `test_ecdh.py` checks compatibility with OpenSSL, by running the "openssl" CLI tool, make sure it's in your `PATH` if you want to test compatibility with it (if OpenSSL is missing, too old, or doesn't support all the curves supported in upstream releases you will see skipped tests in the above `coverage` run). ## Security This library was not designed with security in mind. If you are processing data that needs to be protected we suggest you use a quality wrapper around OpenSSL. [pyca/cryptography](https://cryptography.io) is one example of such a wrapper. The primary use-case of this library is as a portable library for interoperability testing and as a teaching tool. **This library does not protect against side-channel attacks.** Do not allow attackers to measure how long it takes you to generate a key pair or sign a message. Do not allow attackers to run code on the same physical machine when key pair generation or signing is taking place (this includes virtual machines). Do not allow attackers to measure how much power your computer uses while generating the key pair or signing a message. 
Do not allow attackers to measure RF interference coming from your computer while generating a key pair or signing a message. Note: just loading the private key will cause key pair generation. Other operations or attack vectors may also be vulnerable to attacks. **For a sophisticated attacker observing just one operation with a private key will be sufficient to completely reconstruct the private key**. Please also note that any Pure-python cryptographic library will be vulnerable to the same side-channel attacks. This is because Python does not provide side-channel secure primitives (with the exception of [`hmac.compare_digest()`][3]), making side-channel secure programming impossible. This library depends upon a strong source of random numbers. Do not use it on a system where `os.urandom()` does not provide cryptographically secure random numbers. [3]: https://docs.python.org/3/library/hmac.html#hmac.compare_digest ## Usage You start by creating a `SigningKey`. You can use this to sign data, by passing in data as a byte string and getting back the signature (also a byte string). You can also ask a `SigningKey` to give you the corresponding `VerifyingKey`. The `VerifyingKey` can be used to verify a signature, by passing it both the data string and the signature byte string: it either returns True or raises `BadSignatureError`. ```python from ecdsa import SigningKey sk = SigningKey.generate() # uses NIST192p vk = sk.verifying_key signature = sk.sign(b"message") assert vk.verify(signature, b"message") ``` Each `SigningKey`/`VerifyingKey` is associated with a specific curve, like NIST192p (the default one). Longer curves are more secure, but take longer to use, and result in longer keys and signatures. ```python from ecdsa import SigningKey, NIST384p sk = SigningKey.generate(curve=NIST384p) vk = sk.verifying_key signature = sk.sign(b"message") assert vk.verify(signature, b"message") ``` The `SigningKey` can be serialized into several different formats: the shortest is to call `s=sk.to_string()`, and then re-create it with `SigningKey.from_string(s, curve)` . This short form does not record the curve, so you must be sure to pass to `from_string()` the same curve you used for the original key. The short form of a NIST192p-based signing key is just 24 bytes long. If a point encoding is invalid or it does not lie on the specified curve, `from_string()` will raise `MalformedPointError`. ```python from ecdsa import SigningKey, NIST384p sk = SigningKey.generate(curve=NIST384p) sk_string = sk.to_string() sk2 = SigningKey.from_string(sk_string, curve=NIST384p) print(sk_string.hex()) print(sk2.to_string().hex()) ``` Note: while the methods are called `to_string()` the type they return is actually `bytes`, the "string" part is leftover from Python 2. `sk.to_pem()` and `sk.to_der()` will serialize the signing key into the same formats that OpenSSL uses. The PEM file looks like the familiar ASCII-armored `"-----BEGIN EC PRIVATE KEY-----"` base64-encoded format, and the DER format is a shorter binary form of the same data. `SigningKey.from_pem()/.from_der()` will undo this serialization. These formats include the curve name, so you do not need to pass in a curve identifier to the deserializer. In case the file is malformed `from_der()` and `from_pem()` will raise `UnexpectedDER` or` MalformedPointError`. 
```python from ecdsa import SigningKey, NIST384p sk = SigningKey.generate(curve=NIST384p) sk_pem = sk.to_pem() sk2 = SigningKey.from_pem(sk_pem) # sk and sk2 are the same key ``` Likewise, the `VerifyingKey` can be serialized in the same way: `vk.to_string()/VerifyingKey.from_string()`, `to_pem()/from_pem()`, and `to_der()/from_der()`. The same `curve=` argument is needed for `VerifyingKey.from_string()`. ```python from ecdsa import SigningKey, VerifyingKey, NIST384p sk = SigningKey.generate(curve=NIST384p) vk = sk.verifying_key vk_string = vk.to_string() vk2 = VerifyingKey.from_string(vk_string, curve=NIST384p) # vk and vk2 are the same key from ecdsa import SigningKey, VerifyingKey, NIST384p sk = SigningKey.generate(curve=NIST384p) vk = sk.verifying_key vk_pem = vk.to_pem() vk2 = VerifyingKey.from_pem(vk_pem) # vk and vk2 are the same key ``` There are a couple of different ways to compute a signature. Fundamentally, ECDSA takes a number that represents the data being signed, and returns a pair of numbers that represent the signature. The `hashfunc=` argument to `sk.sign()` and `vk.verify()` is used to turn an arbitrary string into a fixed-length digest, which is then turned into a number that ECDSA can sign, and both sign and verify must use the same approach. The default value is `hashlib.sha1`, but if you use NIST256p or a longer curve, you can use `hashlib.sha256` instead. There are also multiple ways to represent a signature. The default `sk.sign()` and `vk.verify()` methods present it as a short string, for simplicity and minimal overhead. To use a different scheme, use the `sk.sign(sigencode=)` and `vk.verify(sigdecode=)` arguments. There are helper functions in the `ecdsa.util` module that can be useful here. It is also possible to create a `SigningKey` from a "seed", which is deterministic. This can be used in protocols where you want to derive consistent signing keys from some other secret, for example when you want three separate keys and only want to store a single master secret. You should start with a uniformly-distributed unguessable seed with about `curve.baselen` bytes of entropy, and then use one of the helper functions in `ecdsa.util` to convert it into an integer in the correct range, and then finally pass it into `SigningKey.from_secret_exponent()`, like this: ```python import os from ecdsa import NIST384p, SigningKey from ecdsa.util import randrange_from_seed__trytryagain def make_key(seed): secexp = randrange_from_seed__trytryagain(seed, NIST384p.order) return SigningKey.from_secret_exponent(secexp, curve=NIST384p) seed = os.urandom(NIST384p.baselen) # or other starting point sk1a = make_key(seed) sk1b = make_key(seed) # note: sk1a and sk1b are the same key assert sk1a.to_string() == sk1b.to_string() sk2 = make_key(b"2-"+seed) # different key assert sk1a.to_string() != sk2.to_string() ``` In case the application will verify a lot of signatures made with a single key, it's possible to precompute some of the internal values to make signature verification significantly faster. The break-even point occurs at about 100 signatures verified. To perform precomputation, you can call the `precompute()` method on `VerifyingKey` instance: ```python from ecdsa import SigningKey, NIST384p sk = SigningKey.generate(curve=NIST384p) vk = sk.verifying_key vk.precompute() signature = sk.sign(b"message") assert vk.verify(signature, b"message") ``` Once `precompute()` was called, all signature verifications with this key will be faster to execute. 
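To make the `hashfunc=`, `sigencode=` and `sigdecode=` arguments described above more concrete, here is a minimal sketch that signs over a SHA-256 digest and encodes the signature as DER using the helpers from `ecdsa.util`:

```python
import hashlib
from ecdsa import SigningKey, NIST256p
from ecdsa.util import sigencode_der, sigdecode_der

sk = SigningKey.generate(curve=NIST256p)
vk = sk.verifying_key
# sign over a SHA-256 digest and emit a DER-encoded ECDSA-Sig-Value
signature = sk.sign(b"message", hashfunc=hashlib.sha256, sigencode=sigencode_der)
# verification must use the same hash function and the matching decoder
assert vk.verify(signature, b"message", hashfunc=hashlib.sha256, sigdecode=sigdecode_der)
```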
## OpenSSL Compatibility To produce signatures that can be verified by OpenSSL tools, or to verify signatures that were produced by those tools, use: ```python # openssl ecparam -name prime256v1 -genkey -out sk.pem # openssl ec -in sk.pem -pubout -out vk.pem # echo "data for signing" > data # openssl dgst -sha256 -sign sk.pem -out data.sig data # openssl dgst -sha256 -verify vk.pem -signature data.sig data # openssl dgst -sha256 -prverify sk.pem -signature data.sig data import hashlib from ecdsa import SigningKey, VerifyingKey from ecdsa.util import sigencode_der, sigdecode_der with open("vk.pem") as f: vk = VerifyingKey.from_pem(f.read()) with open("data", "rb") as f: data = f.read() with open("data.sig", "rb") as f: signature = f.read() assert vk.verify(signature, data, hashlib.sha256, sigdecode=sigdecode_der) with open("sk.pem") as f: sk = SigningKey.from_pem(f.read(), hashlib.sha256) new_signature = sk.sign_deterministic(data, sigencode=sigencode_der) with open("data.sig2", "wb") as f: f.write(new_signature) # openssl dgst -sha256 -verify vk.pem -signature data.sig2 data ``` Note: if compatibility with OpenSSL 1.0.0 or earlier is necessary, the `sigencode_string` and `sigdecode_string` from `ecdsa.util` can be used for respectively writing and reading the signatures. The keys also can be written in format that openssl can handle: ```python from ecdsa import SigningKey, VerifyingKey with open("sk.pem") as f: sk = SigningKey.from_pem(f.read()) with open("sk.pem", "wb") as f: f.write(sk.to_pem()) with open("vk.pem") as f: vk = VerifyingKey.from_pem(f.read()) with open("vk.pem", "wb") as f: f.write(vk.to_pem()) ``` ## Entropy Creating a signing key with `SigningKey.generate()` requires some form of entropy (as opposed to `from_secret_exponent`/`from_string`/`from_der`/`from_pem`, which are deterministic and do not require an entropy source). The default source is `os.urandom()`, but you can pass any other function that behaves like `os.urandom` as the `entropy=` argument to do something different. This may be useful in unit tests, where you want to achieve repeatable results. The `ecdsa.util.PRNG` utility is handy here: it takes a seed and produces a strong pseudo-random stream from it: ```python from ecdsa.util import PRNG from ecdsa import SigningKey rng1 = PRNG(b"seed") sk1 = SigningKey.generate(entropy=rng1) rng2 = PRNG(b"seed") sk2 = SigningKey.generate(entropy=rng2) # sk1 and sk2 are the same key ``` Likewise, ECDSA signature generation requires a random number, and each signature must use a different one (using the same number twice will immediately reveal the private signing key). The `sk.sign()` method takes an `entropy=` argument which behaves the same as `SigningKey.generate(entropy=)`. ## Deterministic Signatures If you call `SigningKey.sign_deterministic(data)` instead of `.sign(data)`, the code will generate a deterministic signature instead of a random one. This uses the algorithm from RFC6979 to safely generate a unique `k` value, derived from the private key and the message being signed. Each time you sign the same message with the same key, you will get the same signature (using the same `k`). This may become the default in a future version, as it is not vulnerable to failures of the entropy source. 
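A short sketch of the property described above: signing the same message twice with `sign_deterministic()` yields byte-for-byte identical signatures:

```python
from ecdsa import SigningKey, NIST256p

sk = SigningKey.generate(curve=NIST256p)
sig1 = sk.sign_deterministic(b"message")
sig2 = sk.sign_deterministic(b"message")
# same key and same message -> same RFC 6979-derived k -> same signature
assert sig1 == sig2
```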
## Examples Create a NIST192p key pair and immediately save both to disk: ```python from ecdsa import SigningKey sk = SigningKey.generate() vk = sk.verifying_key with open("private.pem", "wb") as f: f.write(sk.to_pem()) with open("public.pem", "wb") as f: f.write(vk.to_pem()) ``` Load a signing key from disk, use it to sign a message (using SHA-1), and write the signature to disk: ```python from ecdsa import SigningKey with open("private.pem") as f: sk = SigningKey.from_pem(f.read()) with open("message", "rb") as f: message = f.read() sig = sk.sign(message) with open("signature", "wb") as f: f.write(sig) ``` Load the verifying key, message, and signature from disk, and verify the signature (assume SHA-1 hash): ```python from ecdsa import VerifyingKey, BadSignatureError vk = VerifyingKey.from_pem(open("public.pem").read()) with open("message", "rb") as f: message = f.read() with open("signature", "rb") as f: sig = f.read() try: vk.verify(sig, message) print "good signature" except BadSignatureError: print "BAD SIGNATURE" ``` Create a NIST521p key pair: ```python from ecdsa import SigningKey, NIST521p sk = SigningKey.generate(curve=NIST521p) vk = sk.verifying_key ``` Create three independent signing keys from a master seed: ```python from ecdsa import NIST192p, SigningKey from ecdsa.util import randrange_from_seed__trytryagain def make_key_from_seed(seed, curve=NIST192p): secexp = randrange_from_seed__trytryagain(seed, curve.order) return SigningKey.from_secret_exponent(secexp, curve) sk1 = make_key_from_seed("1:%s" % seed) sk2 = make_key_from_seed("2:%s" % seed) sk3 = make_key_from_seed("3:%s" % seed) ``` Load a verifying key from disk and print it using hex encoding in uncompressed and compressed format (defined in X9.62 and SEC1 standards): ```python from ecdsa import VerifyingKey with open("public.pem") as f: vk = VerifyingKey.from_pem(f.read()) print("uncompressed: {0}".format(vk.to_string("uncompressed").hex())) print("compressed: {0}".format(vk.to_string("compressed").hex())) ``` Load a verifying key from a hex string from compressed format, output uncompressed: ```python from ecdsa import VerifyingKey, NIST256p comp_str = '022799c0d0ee09772fdd337d4f28dc155581951d07082fb19a38aa396b67e77759' vk = VerifyingKey.from_string(bytearray.fromhex(comp_str), curve=NIST256p) print(vk.to_string("uncompressed").hex()) ``` ECDH key exchange with remote party: ```python from ecdsa import ECDH, NIST256p ecdh = ECDH(curve=NIST256p) ecdh.generate_private_key() local_public_key = ecdh.get_public_key() #send `local_public_key` to remote party and receive `remote_public_key` from remote party with open("remote_public_key.pem") as e: remote_public_key = e.read() ecdh.load_received_public_key_pem(remote_public_key) secret = ecdh.generate_sharedsecret_bytes() ``` Platform: UNKNOWN Classifier: Programming Language :: Python Classifier: Programming Language :: Python :: 2 Classifier: Programming Language :: Python :: 2.6 Classifier: Programming Language :: Python :: 2.7 Classifier: Programming Language :: Python :: 3 Classifier: Programming Language :: Python :: 3.3 Classifier: Programming Language :: Python :: 3.4 Classifier: Programming Language :: Python :: 3.5 Classifier: Programming Language :: Python :: 3.6 Classifier: Programming Language :: Python :: 3.7 Classifier: Programming Language :: Python :: 3.8 Classifier: Programming Language :: Python :: 3.9 Classifier: Programming Language :: Python :: 3.10 Classifier: Programming Language :: Python :: 3.11 Requires-Python: >=2.6, !=3.0.*, !=3.1.*, !=3.2.* 
Description-Content-Type: text/markdown Provides-Extra: gmpy2 Provides-Extra: gmpy License-File: LICENSE ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1657371717.0 ecdsa-0.18.0/src/ecdsa.egg-info/SOURCES.txt0000664005075200507520000000331214262276105017501 0ustar00hkariohkario.coveragerc .gitattributes .gitignore .gitleaks.toml .readthedocs.yaml .travis.yml LICENSE MANIFEST.in NEWS README.md build-requirements-2.6.txt build-requirements-2.7.txt build-requirements-3.3.txt build-requirements-3.4.txt build-requirements.txt diff-instrumental.py setup.cfg setup.py speed.py tox.ini versioneer.py .github/workflows/ci.yml docs/Makefile docs/make.bat docs/requirements.txt docs/source/basics.rst docs/source/conf.py docs/source/ec_arithmetic.rst docs/source/ecdsa.curves.rst docs/source/ecdsa.der.rst docs/source/ecdsa.ecdh.rst docs/source/ecdsa.ecdsa.rst docs/source/ecdsa.eddsa.rst docs/source/ecdsa.ellipticcurve.rst docs/source/ecdsa.errors.rst docs/source/ecdsa.keys.rst docs/source/ecdsa.numbertheory.rst docs/source/ecdsa.rfc6979.rst docs/source/ecdsa.rst docs/source/ecdsa.util.rst docs/source/glossary.rst docs/source/index.rst docs/source/modules.rst docs/source/quickstart.rst src/ecdsa/__init__.py src/ecdsa/_compat.py src/ecdsa/_rwlock.py src/ecdsa/_sha3.py src/ecdsa/_version.py src/ecdsa/curves.py src/ecdsa/der.py src/ecdsa/ecdh.py src/ecdsa/ecdsa.py src/ecdsa/eddsa.py src/ecdsa/ellipticcurve.py src/ecdsa/errors.py src/ecdsa/keys.py src/ecdsa/numbertheory.py src/ecdsa/rfc6979.py src/ecdsa/test_curves.py src/ecdsa/test_der.py src/ecdsa/test_ecdh.py src/ecdsa/test_ecdsa.py src/ecdsa/test_eddsa.py src/ecdsa/test_ellipticcurve.py src/ecdsa/test_jacobi.py src/ecdsa/test_keys.py src/ecdsa/test_malformed_sigs.py src/ecdsa/test_numbertheory.py src/ecdsa/test_pyecdsa.py src/ecdsa/test_rw_lock.py src/ecdsa/test_sha3.py src/ecdsa/util.py src/ecdsa.egg-info/PKG-INFO src/ecdsa.egg-info/SOURCES.txt src/ecdsa.egg-info/dependency_links.txt src/ecdsa.egg-info/requires.txt src/ecdsa.egg-info/top_level.txt././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1657371717.0 ecdsa-0.18.0/src/ecdsa.egg-info/dependency_links.txt0000664005075200507520000000000114262276105021664 0ustar00hkariohkario ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1657371717.0 ecdsa-0.18.0/src/ecdsa.egg-info/requires.txt0000664005075200507520000000004714262276105020217 0ustar00hkariohkariosix>=1.9.0 [gmpy] gmpy [gmpy2] gmpy2 ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1657371717.0 ecdsa-0.18.0/src/ecdsa.egg-info/top_level.txt0000664005075200507520000000000614262276105020344 0ustar00hkariohkarioecdsa ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1648836739.0 ecdsa-0.18.0/tox.ini0000664005075200507520000000570414221640203013564 0ustar00hkariohkario [tox] envlist = py26, py27, py33, py34, py35, py36, py37, py38, py39, py310, py311, py, pypy, pypy3, gmpy2py27, gmpy2py39, gmpy2py310, gmpypy27, gmpypy39, gmpypy310, codechecks [testenv] deps = py{33}: py<1.5 py{33}: pytest<3.3 py{33}: enum34 py{33}: hypothesis<3.44 py{26}: unittest2 py{26}: hypothesis<3 py{34}: attrs<21 py{26,27,34,35,36,37,38,39,310,311,py,py3}: pytest py{27,34,35,36,37,38,39,310,311,py,py3}: hypothesis gmpy2py{27,39,310,311}: gmpy2 gmpypy{27,39,310,311}: gmpy gmpy{2py27,2py39,2py310,2py311,py27,py39,py310,py311}: pytest gmpy{2py27,2py39,2py310,2py311,py27,py39,py310,py311}: hypothesis # six==1.9.0 comes from 
setup.py install_requires py27_old_six: six==1.9.0 py27_old_six: pytest py27_old_six: hypothesis # those are the oldest versions of gmpy and gmpy2 on PyPI (i.e. oldest we can # actually test), older versions may work, but are not easy to test py27_old_gmpy: gmpy==1.15 py27_old_gmpy: pytest py27_old_gmpy: hypothesis py27_old_gmpy2: gmpy2==2.0.1 py27_old_gmpy2: pytest py27_old_gmpy2: hypothesis py: pytest py: hypothesis py{33}: wheel<0.30 coverage commands = coverage run --branch -m pytest {posargs:src/ecdsa} [testenv:py27_old_gmpy] basepython = python2.7 [testenv:py27_old_gmpy2] basepython = python2.7 [testenv:py27_old_six] basepython = python2.7 [testenv:gmpypy27] basepython=python2.7 [testenv:gmpypy39] basepython=python3.9 [testenv:gmpypy310] basepython=python3.10 [testenv:gmpypy311] basepython=python3.11 [testenv:gmpy2py27] basepython=python2.7 [testenv:gmpy2py39] basepython=python3.9 [testenv:gmpy2py310] basepython=python3.10 [testenv:gmpy2py311] basepython=python3.11 [testenv:instrumental] basepython = python2.7 deps = gmpy2 instrumental hypothesis pytest>=4.6.0 coverage six commands = instrumental -t ecdsa -i '.*test_.*|.*_version|.*_compat|.*_sha3' {envbindir}/pytest {posargs:src/ecdsa} instrumental -f .instrumental.cov -sr [testenv:coverage] sitepackages=True whitelist_externals=coverage commands = coverage run --branch -m pytest --hypothesis-show-statistics {posargs:src/ecdsa} coverage html coverage report -m [testenv:speed] commands = {envpython} speed.py [testenv:speedgmpy] deps = gmpy commands = {envpython} speed.py [testenv:speedgmpy2] deps = gmpy2 commands = {envpython} speed.py [testenv:codechecks] basepython = python3 deps = black==22.3.0 flake8 commands = flake8 setup.py speed.py src black --check --line-length 79 . [testenv:codeformat] basepython = python3 deps = black==22.3.0 commands = black --line-length 79 . [flake8] exclude = src/ecdsa/test*.py # We're just getting started. For now, ignore the following problems: # E203: whitespace before ':' (this needs to be ignored for black compatibility) # E741: ambiguous variable name extend-ignore = E203,E741 ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1641309332.0 ecdsa-0.18.0/versioneer.py0000664005075200507520000023630414165062224015016 0ustar00hkariohkario# Version: 0.21 """The Versioneer - like a rocketeer, but for versions. The Versioneer ============== * like a rocketeer, but for versions! * https://github.com/python-versioneer/python-versioneer * Brian Warner * License: Public Domain * Compatible with: Python 3.6, 3.7, 3.8, 3.9 and pypy3 * [![Latest Version][pypi-image]][pypi-url] * [![Build Status][travis-image]][travis-url] This is a tool for managing a recorded version number in distutils-based python projects. The goal is to remove the tedious and error-prone "update the embedded version string" step from your release process. Making a new release should be as easy as recording a new tag in your version-control system, and maybe making new tarballs. 
## Quick Install * `pip install versioneer` to somewhere in your $PATH * add a `[versioneer]` section to your setup.cfg (see [Install](INSTALL.md)) * run `versioneer install` in your source tree, commit the results * Verify version information with `python setup.py version` ## Version Identifiers Source trees come from a variety of places: * a version-control system checkout (mostly used by developers) * a nightly tarball, produced by build automation * a snapshot tarball, produced by a web-based VCS browser, like github's "tarball from tag" feature * a release tarball, produced by "setup.py sdist", distributed through PyPI Within each source tree, the version identifier (either a string or a number, this tool is format-agnostic) can come from a variety of places: * ask the VCS tool itself, e.g. "git describe" (for checkouts), which knows about recent "tags" and an absolute revision-id * the name of the directory into which the tarball was unpacked * an expanded VCS keyword ($Id$, etc) * a `_version.py` created by some earlier build step For released software, the version identifier is closely related to a VCS tag. Some projects use tag names that include more than just the version string (e.g. "myproject-1.2" instead of just "1.2"), in which case the tool needs to strip the tag prefix to extract the version identifier. For unreleased software (between tags), the version identifier should provide enough information to help developers recreate the same tree, while also giving them an idea of roughly how old the tree is (after version 1.2, before version 1.3). Many VCS systems can report a description that captures this, for example `git describe --tags --dirty --always` reports things like "0.7-1-g574ab98-dirty" to indicate that the checkout is one revision past the 0.7 tag, has a unique revision id of "574ab98", and is "dirty" (it has uncommitted changes). The version identifier is used for multiple purposes: * to allow the module to self-identify its version: `myproject.__version__` * to choose a name and prefix for a 'setup.py sdist' tarball ## Theory of Operation Versioneer works by adding a special `_version.py` file into your source tree, where your `__init__.py` can import it. This `_version.py` knows how to dynamically ask the VCS tool for version information at import time. `_version.py` also contains `$Revision$` markers, and the installation process marks `_version.py` to have this marker rewritten with a tag name during the `git archive` command. As a result, generated tarballs will contain enough information to get the proper version. To allow `setup.py` to compute a version too, a `versioneer.py` is added to the top level of your source tree, next to `setup.py` and the `setup.cfg` that configures it. This overrides several distutils/setuptools commands to compute the version when invoked, and changes `setup.py build` and `setup.py sdist` to replace `_version.py` with a small static file that contains just the generated version data. ## Installation See [INSTALL.md](./INSTALL.md) for detailed installation instructions. ## Version-String Flavors Code which uses Versioneer can learn about its version string at runtime by importing `_version` from your main `__init__.py` file and running the `get_versions()` function. From the "outside" (e.g. in `setup.py`), you can import the top-level `versioneer.py` and run `get_versions()`. 
Both functions return a dictionary with different flavors of version information: * `['version']`: A condensed version string, rendered using the selected style. This is the most commonly used value for the project's version string. The default "pep440" style yields strings like `0.11`, `0.11+2.g1076c97`, or `0.11+2.g1076c97.dirty`. See the "Styles" section below for alternative styles. * `['full-revisionid']`: detailed revision identifier. For Git, this is the full SHA1 commit id, e.g. "1076c978a8d3cfc70f408fe5974aa6c092c949ac". * `['date']`: Date and time of the latest `HEAD` commit. For Git, it is the commit date in ISO 8601 format. This will be None if the date is not available. * `['dirty']`: a boolean, True if the tree has uncommitted changes. Note that this is only accurate if run in a VCS checkout, otherwise it is likely to be False or None * `['error']`: if the version string could not be computed, this will be set to a string describing the problem, otherwise it will be None. It may be useful to throw an exception in setup.py if this is set, to avoid e.g. creating tarballs with a version string of "unknown". Some variants are more useful than others. Including `full-revisionid` in a bug report should allow developers to reconstruct the exact code being tested (or indicate the presence of local changes that should be shared with the developers). `version` is suitable for display in an "about" box or a CLI `--version` output: it can be easily compared against release notes and lists of bugs fixed in various releases. The installer adds the following text to your `__init__.py` to place a basic version in `YOURPROJECT.__version__`: from ._version import get_versions __version__ = get_versions()['version'] del get_versions ## Styles The setup.cfg `style=` configuration controls how the VCS information is rendered into a version string. The default style, "pep440", produces a PEP440-compliant string, equal to the un-prefixed tag name for actual releases, and containing an additional "local version" section with more detail for in-between builds. For Git, this is TAG[+DISTANCE.gHEX[.dirty]] , using information from `git describe --tags --dirty --always`. For example "0.11+2.g1076c97.dirty" indicates that the tree is like the "1076c97" commit but has uncommitted changes (".dirty"), and that this commit is two revisions ("+2") beyond the "0.11" tag. For released software (exactly equal to a known tag), the identifier will only contain the stripped tag, e.g. "0.11". Other styles are available. See [details.md](details.md) in the Versioneer source tree for descriptions. ## Debugging Versioneer tries to avoid fatal errors: if something goes wrong, it will tend to return a version of "0+unknown". To investigate the problem, run `setup.py version`, which will run the version-lookup code in a verbose mode, and will display the full contents of `get_versions()` (including the `error` string, which may help identify what went wrong). ## Known Limitations Some situations are known to cause problems for Versioneer. This details the most significant ones. More can be found on Github [issues page](https://github.com/python-versioneer/python-versioneer/issues). ### Subprojects Versioneer has limited support for source trees in which `setup.py` is not in the root directory (e.g. `setup.py` and `.git/` are *not* siblings). 
The are two common reasons why `setup.py` might not be in the root: * Source trees which contain multiple subprojects, such as [Buildbot](https://github.com/buildbot/buildbot), which contains both "master" and "slave" subprojects, each with their own `setup.py`, `setup.cfg`, and `tox.ini`. Projects like these produce multiple PyPI distributions (and upload multiple independently-installable tarballs). * Source trees whose main purpose is to contain a C library, but which also provide bindings to Python (and perhaps other languages) in subdirectories. Versioneer will look for `.git` in parent directories, and most operations should get the right version string. However `pip` and `setuptools` have bugs and implementation details which frequently cause `pip install .` from a subproject directory to fail to find a correct version string (so it usually defaults to `0+unknown`). `pip install --editable .` should work correctly. `setup.py install` might work too. Pip-8.1.1 is known to have this problem, but hopefully it will get fixed in some later version. [Bug #38](https://github.com/python-versioneer/python-versioneer/issues/38) is tracking this issue. The discussion in [PR #61](https://github.com/python-versioneer/python-versioneer/pull/61) describes the issue from the Versioneer side in more detail. [pip PR#3176](https://github.com/pypa/pip/pull/3176) and [pip PR#3615](https://github.com/pypa/pip/pull/3615) contain work to improve pip to let Versioneer work correctly. Versioneer-0.16 and earlier only looked for a `.git` directory next to the `setup.cfg`, so subprojects were completely unsupported with those releases. ### Editable installs with setuptools <= 18.5 `setup.py develop` and `pip install --editable .` allow you to install a project into a virtualenv once, then continue editing the source code (and test) without re-installing after every change. "Entry-point scripts" (`setup(entry_points={"console_scripts": ..})`) are a convenient way to specify executable scripts that should be installed along with the python package. These both work as expected when using modern setuptools. When using setuptools-18.5 or earlier, however, certain operations will cause `pkg_resources.DistributionNotFound` errors when running the entrypoint script, which must be resolved by re-installing the package. This happens when the install happens with one version, then the egg_info data is regenerated while a different version is checked out. Many setup.py commands cause egg_info to be rebuilt (including `sdist`, `wheel`, and installing into a different virtualenv), so this can be surprising. [Bug #83](https://github.com/python-versioneer/python-versioneer/issues/83) describes this one, but upgrading to a newer version of setuptools should probably resolve it. ## Updating Versioneer To upgrade your project to a new release of Versioneer, do the following: * install the new Versioneer (`pip install -U versioneer` or equivalent) * edit `setup.cfg`, if necessary, to include any new configuration settings indicated by the release notes. See [UPGRADING](./UPGRADING.md) for details. * re-run `versioneer install` in your source tree, to replace `SRC/_version.py` * commit any changed files ## Future Directions This tool is designed to make it easily extended to other version-control systems: all VCS-specific components are in separate directories like src/git/ . The top-level `versioneer.py` script is assembled from these components by running make-versioneer.py . 
In the future, make-versioneer.py will take a VCS name as an argument, and will construct a version of `versioneer.py` that is specific to the given VCS. It might also take the configuration arguments that are currently provided manually during installation by editing setup.py . Alternatively, it might go the other direction and include code from all supported VCS systems, reducing the number of intermediate scripts. ## Similar projects * [setuptools_scm](https://github.com/pypa/setuptools_scm/) - a non-vendored build-time dependency * [minver](https://github.com/jbweston/miniver) - a lightweight reimplementation of versioneer * [versioningit](https://github.com/jwodder/versioningit) - a PEP 518-based setuptools plugin ## License To make Versioneer easier to embed, all its code is dedicated to the public domain. The `_version.py` that it creates is also in the public domain. Specifically, both are released under the Creative Commons "Public Domain Dedication" license (CC0-1.0), as described in https://creativecommons.org/publicdomain/zero/1.0/ . [pypi-image]: https://img.shields.io/pypi/v/versioneer.svg [pypi-url]: https://pypi.python.org/pypi/versioneer/ [travis-image]: https://img.shields.io/travis/com/python-versioneer/python-versioneer.svg [travis-url]: https://travis-ci.com/github/python-versioneer/python-versioneer """ # pylint:disable=invalid-name,import-outside-toplevel,missing-function-docstring # pylint:disable=missing-class-docstring,too-many-branches,too-many-statements # pylint:disable=raise-missing-from,too-many-lines,too-many-locals,import-error # pylint:disable=too-few-public-methods,redefined-outer-name,consider-using-with # pylint:disable=attribute-defined-outside-init,too-many-arguments from __future__ import print_function try: import configparser except ImportError: import ConfigParser as configparser import errno import json import os import re import subprocess import sys class VersioneerConfig: """Container for Versioneer configuration parameters.""" def get_root(): """Get the project root directory. We require that all commands are run from the project root, i.e. the directory that contains setup.py, setup.cfg, and versioneer.py . """ root = os.path.realpath(os.path.abspath(os.getcwd())) setup_py = os.path.join(root, "setup.py") versioneer_py = os.path.join(root, "versioneer.py") if not (os.path.exists(setup_py) or os.path.exists(versioneer_py)): # allow 'python path/to/setup.py COMMAND' root = os.path.dirname(os.path.realpath(os.path.abspath(sys.argv[0]))) setup_py = os.path.join(root, "setup.py") versioneer_py = os.path.join(root, "versioneer.py") if not (os.path.exists(setup_py) or os.path.exists(versioneer_py)): err = ( "Versioneer was unable to run the project root directory. " "Versioneer requires setup.py to be executed from " "its immediate directory (like 'python setup.py COMMAND'), " "or in a way that lets it use sys.argv[0] to find the root " "(like 'python path/to/setup.py COMMAND')." ) raise VersioneerBadRootError(err) try: # Certain runtime workflows (setup.py install/develop in a setuptools # tree) execute all dependencies in a single python process, so # "versioneer" may be imported multiple times, and python's shared # module-import table will cache the first one. So we can't use # os.path.dirname(__file__), as that will find whichever # versioneer.py was first imported, even in later projects. 
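            # Instead, compare this module's own location (via __file__) against the
            # versioneer.py that sits next to setup.py, and warn if a different copy is in use.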
my_path = os.path.realpath(os.path.abspath(__file__)) me_dir = os.path.normcase(os.path.splitext(my_path)[0]) vsr_dir = os.path.normcase(os.path.splitext(versioneer_py)[0]) if me_dir != vsr_dir: print( "Warning: build in %s is using versioneer.py from %s" % (os.path.dirname(my_path), versioneer_py) ) except NameError: pass return root def get_config_from_root(root): """Read the project setup.cfg file to determine Versioneer config.""" # This might raise OSError (if setup.cfg is missing), or # configparser.NoSectionError (if it lacks a [versioneer] section), or # configparser.NoOptionError (if it lacks "VCS="). See the docstring at # the top of versioneer.py for instructions on writing your setup.cfg . setup_cfg = os.path.join(root, "setup.cfg") parser = configparser.ConfigParser() with open(setup_cfg, "r") as cfg_file: if sys.version_info < (3, 0): parser.readfp(cfg_file) else: parser.read_file(cfg_file) VCS = parser.get("versioneer", "VCS") # mandatory def get(parser, name): if parser.has_option("versioneer", name): return parser.get("versioneer", name) return None cfg = VersioneerConfig() cfg.VCS = VCS if sys.version_info < (3, 0): cfg.style = get(parser, "style") or "" cfg.versionfile_source = get(parser, "versionfile_source") cfg.versionfile_build = get(parser, "versionfile_build") cfg.tag_prefix = get(parser, "tag_prefix") cfg.parentdir_prefix = get(parser, "parentdir_prefix") cfg.verbose = get(parser, "verbose") else: # Dict-like interface for non-mandatory entries section = parser["versioneer"] cfg.style = section.get("style", "") cfg.versionfile_source = section.get("versionfile_source") cfg.versionfile_build = section.get("versionfile_build") cfg.tag_prefix = section.get("tag_prefix") cfg.parentdir_prefix = section.get("parentdir_prefix") cfg.verbose = section.get("verbose") if cfg.tag_prefix in ("''", '""'): cfg.tag_prefix = "" return cfg class NotThisMethod(Exception): """Exception raised if a method is not valid for the current scenario.""" # these dictionaries contain VCS-specific tools LONG_VERSION_PY = {} HANDLERS = {} def register_vcs_handler(vcs, method): # decorator """Create decorator to mark a method as the handler of a VCS.""" def decorate(f): """Store f in HANDLERS[vcs][method].""" HANDLERS.setdefault(vcs, {})[method] = f return f return decorate def run_command( commands, args, cwd=None, verbose=False, hide_stderr=False, env=None ): """Call the given command(s).""" assert isinstance(commands, list) process = None for command in commands: try: dispcmd = str([command] + args) # remember shell=False, so use git.cmd on windows, not just git process = subprocess.Popen( [command] + args, cwd=cwd, env=env, stdout=subprocess.PIPE, stderr=(subprocess.PIPE if hide_stderr else None), ) break except OSError: e = sys.exc_info()[1] if e.errno == errno.ENOENT: continue if verbose: print("unable to run %s" % dispcmd) print(e) return None, None else: if verbose: print("unable to find command, tried %s" % (commands,)) return None, None stdout = process.communicate()[0].strip().decode() if process.returncode != 0: if verbose: print("unable to run %s (error)" % dispcmd) print("stdout was %s" % stdout) return None, process.returncode return stdout, process.returncode LONG_VERSION_PY[ "git" ] = r''' # This file helps to compute a version number in source trees obtained from # git-archive tarball (such as those provided by githubs download-from-tag # feature). 
Distribution tarballs (built by setup.py sdist) and build # directories (produced by setup.py build) will contain a much shorter file # that just contains the computed version number. # This file is released into the public domain. Generated by # versioneer-0.21 (https://github.com/python-versioneer/python-versioneer) """Git implementation of _version.py.""" import errno import os import re import subprocess import sys from typing import Callable, Dict def get_keywords(): """Get the keywords needed to look up the version information.""" # these strings will be replaced by git during git-archive. # setup.py/versioneer.py will grep for the variable names, so they must # each be defined on a line of their own. _version.py will just call # get_keywords(). git_refnames = "%(DOLLAR)sFormat:%%d%(DOLLAR)s" git_full = "%(DOLLAR)sFormat:%%H%(DOLLAR)s" git_date = "%(DOLLAR)sFormat:%%ci%(DOLLAR)s" keywords = {"refnames": git_refnames, "full": git_full, "date": git_date} return keywords class VersioneerConfig: """Container for Versioneer configuration parameters.""" def get_config(): """Create, populate and return the VersioneerConfig() object.""" # these strings are filled in when 'setup.py versioneer' creates # _version.py cfg = VersioneerConfig() cfg.VCS = "git" cfg.style = "%(STYLE)s" cfg.tag_prefix = "%(TAG_PREFIX)s" cfg.parentdir_prefix = "%(PARENTDIR_PREFIX)s" cfg.versionfile_source = "%(VERSIONFILE_SOURCE)s" cfg.verbose = False return cfg class NotThisMethod(Exception): """Exception raised if a method is not valid for the current scenario.""" LONG_VERSION_PY: Dict[str, str] = {} HANDLERS: Dict[str, Dict[str, Callable]] = {} def register_vcs_handler(vcs, method): # decorator """Create decorator to mark a method as the handler of a VCS.""" def decorate(f): """Store f in HANDLERS[vcs][method].""" if vcs not in HANDLERS: HANDLERS[vcs] = {} HANDLERS[vcs][method] = f return f return decorate def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, env=None): """Call the given command(s).""" assert isinstance(commands, list) process = None for command in commands: try: dispcmd = str([command] + args) # remember shell=False, so use git.cmd on windows, not just git process = subprocess.Popen([command] + args, cwd=cwd, env=env, stdout=subprocess.PIPE, stderr=(subprocess.PIPE if hide_stderr else None)) break except OSError: e = sys.exc_info()[1] if e.errno == errno.ENOENT: continue if verbose: print("unable to run %%s" %% dispcmd) print(e) return None, None else: if verbose: print("unable to find command, tried %%s" %% (commands,)) return None, None stdout = process.communicate()[0].strip().decode() if process.returncode != 0: if verbose: print("unable to run %%s (error)" %% dispcmd) print("stdout was %%s" %% stdout) return None, process.returncode return stdout, process.returncode def versions_from_parentdir(parentdir_prefix, root, verbose): """Try to determine the version from the parent directory name. Source tarballs conventionally unpack into a directory that includes both the project name and a version string. 
We will also support searching up two directory levels for an appropriately named parent directory """ rootdirs = [] for _ in range(3): dirname = os.path.basename(root) if dirname.startswith(parentdir_prefix): return {"version": dirname[len(parentdir_prefix):], "full-revisionid": None, "dirty": False, "error": None, "date": None} rootdirs.append(root) root = os.path.dirname(root) # up a level if verbose: print("Tried directories %%s but none started with prefix %%s" %% (str(rootdirs), parentdir_prefix)) raise NotThisMethod("rootdir doesn't start with parentdir_prefix") @register_vcs_handler("git", "get_keywords") def git_get_keywords(versionfile_abs): """Extract version information from the given file.""" # the code embedded in _version.py can just fetch the value of these # keywords. When used from setup.py, we don't want to import _version.py, # so we do it with a regexp instead. This function is not used from # _version.py. keywords = {} try: with open(versionfile_abs, "r") as fobj: for line in fobj: if line.strip().startswith("git_refnames ="): mo = re.search(r'=\s*"(.*)"', line) if mo: keywords["refnames"] = mo.group(1) if line.strip().startswith("git_full ="): mo = re.search(r'=\s*"(.*)"', line) if mo: keywords["full"] = mo.group(1) if line.strip().startswith("git_date ="): mo = re.search(r'=\s*"(.*)"', line) if mo: keywords["date"] = mo.group(1) except OSError: pass return keywords @register_vcs_handler("git", "keywords") def git_versions_from_keywords(keywords, tag_prefix, verbose): """Get version information from git keywords.""" if "refnames" not in keywords: raise NotThisMethod("Short version file found") date = keywords.get("date") if date is not None: # Use only the last line. Previous lines may contain GPG signature # information. date = date.splitlines()[-1] # git-2.2.0 added "%%cI", which expands to an ISO-8601 -compliant # datestamp. However we prefer "%%ci" (which expands to an "ISO-8601 # -like" string, which we must then edit to make compliant), because # it's been around since git-1.5.3, and it's too difficult to # discover which version we're using, or to work around using an # older one. date = date.strip().replace(" ", "T", 1).replace(" ", "", 1) refnames = keywords["refnames"].strip() if refnames.startswith("$Format"): if verbose: print("keywords are unexpanded, not using") raise NotThisMethod("unexpanded keywords, not a git-archive tarball") refs = {r.strip() for r in refnames.strip("()").split(",")} # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of # just "foo-1.0". If we see a "tag: " prefix, prefer those. TAG = "tag: " tags = {r[len(TAG):] for r in refs if r.startswith(TAG)} if not tags: # Either we're using git < 1.8.3, or there really are no tags. We use # a heuristic: assume all version tags have a digit. The old git %%d # expansion behaves like git log --decorate=short and strips out the # refs/heads/ and refs/tags/ prefixes that would let us distinguish # between branches and tags. By ignoring refnames without digits, we # filter out many common branch names like "release" and # "stabilization", as well as "HEAD" and "master". tags = {r for r in refs if re.search(r'\d', r)} if verbose: print("discarding '%%s', no digits" %% ",".join(refs - tags)) if verbose: print("likely tags: %%s" %% ",".join(sorted(tags))) for ref in sorted(tags): # sorting will prefer e.g. 
"2.0" over "2.0rc1" if ref.startswith(tag_prefix): r = ref[len(tag_prefix):] # Filter out refs that exactly match prefix or that don't start # with a number once the prefix is stripped (mostly a concern # when prefix is '') if not re.match(r'\d', r): continue if verbose: print("picking %%s" %% r) return {"version": r, "full-revisionid": keywords["full"].strip(), "dirty": False, "error": None, "date": date} # no suitable tags, so version is "0+unknown", but full hex is still there if verbose: print("no suitable tags, using unknown + full revision id") return {"version": "0+unknown", "full-revisionid": keywords["full"].strip(), "dirty": False, "error": "no suitable tags", "date": None} @register_vcs_handler("git", "pieces_from_vcs") def git_pieces_from_vcs(tag_prefix, root, verbose, runner=run_command): """Get version from 'git describe' in the root of the source tree. This only gets called if the git-archive 'subst' keywords were *not* expanded, and _version.py hasn't already been rewritten with a short version string, meaning we're inside a checked out source tree. """ GITS = ["git"] TAG_PREFIX_REGEX = "*" if sys.platform == "win32": GITS = ["git.cmd", "git.exe"] TAG_PREFIX_REGEX = r"\*" _, rc = runner(GITS, ["rev-parse", "--git-dir"], cwd=root, hide_stderr=True) if rc != 0: if verbose: print("Directory %%s not under git control" %% root) raise NotThisMethod("'git rev-parse --git-dir' returned error") # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty] # if there isn't one, this yields HEX[-dirty] (no NUM) describe_out, rc = runner(GITS, ["describe", "--tags", "--dirty", "--always", "--long", "--match", "%%s%%s" %% (tag_prefix, TAG_PREFIX_REGEX)], cwd=root) # --long was added in git-1.5.5 if describe_out is None: raise NotThisMethod("'git describe' failed") describe_out = describe_out.strip() full_out, rc = runner(GITS, ["rev-parse", "HEAD"], cwd=root) if full_out is None: raise NotThisMethod("'git rev-parse' failed") full_out = full_out.strip() pieces = {} pieces["long"] = full_out pieces["short"] = full_out[:7] # maybe improved later pieces["error"] = None branch_name, rc = runner(GITS, ["rev-parse", "--abbrev-ref", "HEAD"], cwd=root) # --abbrev-ref was added in git-1.6.3 if rc != 0 or branch_name is None: raise NotThisMethod("'git rev-parse --abbrev-ref' returned error") branch_name = branch_name.strip() if branch_name == "HEAD": # If we aren't exactly on a branch, pick a branch which represents # the current commit. If all else fails, we are on a branchless # commit. branches, rc = runner(GITS, ["branch", "--contains"], cwd=root) # --contains was added in git-1.5.4 if rc != 0 or branches is None: raise NotThisMethod("'git branch --contains' returned error") branches = branches.split("\n") # Remove the first line if we're running detached if "(" in branches[0]: branches.pop(0) # Strip off the leading "* " from the list of branches. branches = [branch[2:] for branch in branches] if "master" in branches: branch_name = "master" elif not branches: branch_name = None else: # Pick the first branch that is returned. Good or bad. branch_name = branches[0] pieces["branch"] = branch_name # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] # TAG might have hyphens. 
git_describe = describe_out # look for -dirty suffix dirty = git_describe.endswith("-dirty") pieces["dirty"] = dirty if dirty: git_describe = git_describe[:git_describe.rindex("-dirty")] # now we have TAG-NUM-gHEX or HEX if "-" in git_describe: # TAG-NUM-gHEX mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe) if not mo: # unparsable. Maybe git-describe is misbehaving? pieces["error"] = ("unable to parse git-describe output: '%%s'" %% describe_out) return pieces # tag full_tag = mo.group(1) if not full_tag.startswith(tag_prefix): if verbose: fmt = "tag '%%s' doesn't start with prefix '%%s'" print(fmt %% (full_tag, tag_prefix)) pieces["error"] = ("tag '%%s' doesn't start with prefix '%%s'" %% (full_tag, tag_prefix)) return pieces pieces["closest-tag"] = full_tag[len(tag_prefix):] # distance: number of commits since tag pieces["distance"] = int(mo.group(2)) # commit: short hex revision ID pieces["short"] = mo.group(3) else: # HEX: no tags pieces["closest-tag"] = None count_out, rc = runner(GITS, ["rev-list", "HEAD", "--count"], cwd=root) pieces["distance"] = int(count_out) # total number of commits # commit date: see ISO-8601 comment in git_versions_from_keywords() date = runner(GITS, ["show", "-s", "--format=%%ci", "HEAD"], cwd=root)[0].strip() # Use only the last line. Previous lines may contain GPG signature # information. date = date.splitlines()[-1] pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1) return pieces def plus_or_dot(pieces): """Return a + if we don't already have one, else return a .""" if "+" in pieces.get("closest-tag", ""): return "." return "+" def render_pep440(pieces): """Build up version string, with post-release "local version identifier". Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty Exceptions: 1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty] """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"] or pieces["dirty"]: rendered += plus_or_dot(pieces) rendered += "%%d.g%%s" %% (pieces["distance"], pieces["short"]) if pieces["dirty"]: rendered += ".dirty" else: # exception #1 rendered = "0+untagged.%%d.g%%s" %% (pieces["distance"], pieces["short"]) if pieces["dirty"]: rendered += ".dirty" return rendered def render_pep440_branch(pieces): """TAG[[.dev0]+DISTANCE.gHEX[.dirty]] . The ".dev0" means not master branch. Note that .dev0 sorts backwards (a feature branch will appear "older" than the master branch). Exceptions: 1: no tags. 0[.dev0]+untagged.DISTANCE.gHEX[.dirty] """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"] or pieces["dirty"]: if pieces["branch"] != "master": rendered += ".dev0" rendered += plus_or_dot(pieces) rendered += "%%d.g%%s" %% (pieces["distance"], pieces["short"]) if pieces["dirty"]: rendered += ".dirty" else: # exception #1 rendered = "0" if pieces["branch"] != "master": rendered += ".dev0" rendered += "+untagged.%%d.g%%s" %% (pieces["distance"], pieces["short"]) if pieces["dirty"]: rendered += ".dirty" return rendered def pep440_split_post(ver): """Split pep440 version string at the post-release segment. Returns the release segments before the post-release and the post-release version number (or -1 if no post-release segment is present). """ vc = str.split(ver, ".post") return vc[0], int(vc[1] or 0) if len(vc) == 2 else None def render_pep440_pre(pieces): """TAG[.postN.devDISTANCE] -- No -dirty. Exceptions: 1: no tags. 
0.post0.devDISTANCE """ if pieces["closest-tag"]: if pieces["distance"]: # update the post release segment tag_version, post_version = pep440_split_post(pieces["closest-tag"]) rendered = tag_version if post_version is not None: rendered += ".post%%d.dev%%d" %% (post_version+1, pieces["distance"]) else: rendered += ".post0.dev%%d" %% (pieces["distance"]) else: # no commits, use the tag as the version rendered = pieces["closest-tag"] else: # exception #1 rendered = "0.post0.dev%%d" %% pieces["distance"] return rendered def render_pep440_post(pieces): """TAG[.postDISTANCE[.dev0]+gHEX] . The ".dev0" means dirty. Note that .dev0 sorts backwards (a dirty tree will appear "older" than the corresponding clean one), but you shouldn't be releasing software with -dirty anyways. Exceptions: 1: no tags. 0.postDISTANCE[.dev0] """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"] or pieces["dirty"]: rendered += ".post%%d" %% pieces["distance"] if pieces["dirty"]: rendered += ".dev0" rendered += plus_or_dot(pieces) rendered += "g%%s" %% pieces["short"] else: # exception #1 rendered = "0.post%%d" %% pieces["distance"] if pieces["dirty"]: rendered += ".dev0" rendered += "+g%%s" %% pieces["short"] return rendered def render_pep440_post_branch(pieces): """TAG[.postDISTANCE[.dev0]+gHEX[.dirty]] . The ".dev0" means not master branch. Exceptions: 1: no tags. 0.postDISTANCE[.dev0]+gHEX[.dirty] """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"] or pieces["dirty"]: rendered += ".post%%d" %% pieces["distance"] if pieces["branch"] != "master": rendered += ".dev0" rendered += plus_or_dot(pieces) rendered += "g%%s" %% pieces["short"] if pieces["dirty"]: rendered += ".dirty" else: # exception #1 rendered = "0.post%%d" %% pieces["distance"] if pieces["branch"] != "master": rendered += ".dev0" rendered += "+g%%s" %% pieces["short"] if pieces["dirty"]: rendered += ".dirty" return rendered def render_pep440_old(pieces): """TAG[.postDISTANCE[.dev0]] . The ".dev0" means dirty. Exceptions: 1: no tags. 0.postDISTANCE[.dev0] """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"] or pieces["dirty"]: rendered += ".post%%d" %% pieces["distance"] if pieces["dirty"]: rendered += ".dev0" else: # exception #1 rendered = "0.post%%d" %% pieces["distance"] if pieces["dirty"]: rendered += ".dev0" return rendered def render_git_describe(pieces): """TAG[-DISTANCE-gHEX][-dirty]. Like 'git describe --tags --dirty --always'. Exceptions: 1: no tags. HEX[-dirty] (note: no 'g' prefix) """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"]: rendered += "-%%d-g%%s" %% (pieces["distance"], pieces["short"]) else: # exception #1 rendered = pieces["short"] if pieces["dirty"]: rendered += "-dirty" return rendered def render_git_describe_long(pieces): """TAG-DISTANCE-gHEX[-dirty]. Like 'git describe --tags --dirty --always -long'. The distance/hash is unconditional. Exceptions: 1: no tags. 
HEX[-dirty] (note: no 'g' prefix) """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] rendered += "-%%d-g%%s" %% (pieces["distance"], pieces["short"]) else: # exception #1 rendered = pieces["short"] if pieces["dirty"]: rendered += "-dirty" return rendered def render(pieces, style): """Render the given version pieces into the requested style.""" if pieces["error"]: return {"version": "unknown", "full-revisionid": pieces.get("long"), "dirty": None, "error": pieces["error"], "date": None} if not style or style == "default": style = "pep440" # the default if style == "pep440": rendered = render_pep440(pieces) elif style == "pep440-branch": rendered = render_pep440_branch(pieces) elif style == "pep440-pre": rendered = render_pep440_pre(pieces) elif style == "pep440-post": rendered = render_pep440_post(pieces) elif style == "pep440-post-branch": rendered = render_pep440_post_branch(pieces) elif style == "pep440-old": rendered = render_pep440_old(pieces) elif style == "git-describe": rendered = render_git_describe(pieces) elif style == "git-describe-long": rendered = render_git_describe_long(pieces) else: raise ValueError("unknown style '%%s'" %% style) return {"version": rendered, "full-revisionid": pieces["long"], "dirty": pieces["dirty"], "error": None, "date": pieces.get("date")} def get_versions(): """Get version information or return default if unable to do so.""" # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have # __file__, we can work backwards from there to the root. Some # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which # case we can only use expanded keywords. cfg = get_config() verbose = cfg.verbose try: return git_versions_from_keywords(get_keywords(), cfg.tag_prefix, verbose) except NotThisMethod: pass try: root = os.path.realpath(__file__) # versionfile_source is the relative path from the top of the source # tree (where the .git directory might live) to this file. Invert # this to find the root from __file__. for _ in cfg.versionfile_source.split('/'): root = os.path.dirname(root) except NameError: return {"version": "0+unknown", "full-revisionid": None, "dirty": None, "error": "unable to find root of source tree", "date": None} try: pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose) return render(pieces, cfg.style) except NotThisMethod: pass try: if cfg.parentdir_prefix: return versions_from_parentdir(cfg.parentdir_prefix, root, verbose) except NotThisMethod: pass return {"version": "0+unknown", "full-revisionid": None, "dirty": None, "error": "unable to compute version", "date": None} ''' @register_vcs_handler("git", "get_keywords") def git_get_keywords(versionfile_abs): """Extract version information from the given file.""" # the code embedded in _version.py can just fetch the value of these # keywords. When used from setup.py, we don't want to import _version.py, # so we do it with a regexp instead. This function is not used from # _version.py. 
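    # Unexpanded "$Format$" placeholders are returned as-is here; git_versions_from_keywords()
    # detects and rejects them later.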
keywords = {} try: with open(versionfile_abs, "r") as fobj: for line in fobj: if line.strip().startswith("git_refnames ="): mo = re.search(r'=\s*"(.*)"', line) if mo: keywords["refnames"] = mo.group(1) if line.strip().startswith("git_full ="): mo = re.search(r'=\s*"(.*)"', line) if mo: keywords["full"] = mo.group(1) if line.strip().startswith("git_date ="): mo = re.search(r'=\s*"(.*)"', line) if mo: keywords["date"] = mo.group(1) except OSError: pass return keywords @register_vcs_handler("git", "keywords") def git_versions_from_keywords(keywords, tag_prefix, verbose): """Get version information from git keywords.""" if "refnames" not in keywords: raise NotThisMethod("Short version file found") date = keywords.get("date") if date is not None: # Use only the last line. Previous lines may contain GPG signature # information. date = date.splitlines()[-1] # git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant # datestamp. However we prefer "%ci" (which expands to an "ISO-8601 # -like" string, which we must then edit to make compliant), because # it's been around since git-1.5.3, and it's too difficult to # discover which version we're using, or to work around using an # older one. date = date.strip().replace(" ", "T", 1).replace(" ", "", 1) refnames = keywords["refnames"].strip() if refnames.startswith("$Format"): if verbose: print("keywords are unexpanded, not using") raise NotThisMethod("unexpanded keywords, not a git-archive tarball") refs = {r.strip() for r in refnames.strip("()").split(",")} # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of # just "foo-1.0". If we see a "tag: " prefix, prefer those. TAG = "tag: " tags = {r[len(TAG) :] for r in refs if r.startswith(TAG)} if not tags: # Either we're using git < 1.8.3, or there really are no tags. We use # a heuristic: assume all version tags have a digit. The old git %d # expansion behaves like git log --decorate=short and strips out the # refs/heads/ and refs/tags/ prefixes that would let us distinguish # between branches and tags. By ignoring refnames without digits, we # filter out many common branch names like "release" and # "stabilization", as well as "HEAD" and "master". tags = {r for r in refs if re.search(r"\d", r)} if verbose: print("discarding '%s', no digits" % ",".join(refs - tags)) if verbose: print("likely tags: %s" % ",".join(sorted(tags))) for ref in sorted(tags): # sorting will prefer e.g. "2.0" over "2.0rc1" if ref.startswith(tag_prefix): r = ref[len(tag_prefix) :] # Filter out refs that exactly match prefix or that don't start # with a number once the prefix is stripped (mostly a concern # when prefix is '') if not re.match(r"\d", r): continue if verbose: print("picking %s" % r) return { "version": r, "full-revisionid": keywords["full"].strip(), "dirty": False, "error": None, "date": date, } # no suitable tags, so version is "0+unknown", but full hex is still there if verbose: print("no suitable tags, using unknown + full revision id") return { "version": "0+unknown", "full-revisionid": keywords["full"].strip(), "dirty": False, "error": "no suitable tags", "date": None, } @register_vcs_handler("git", "pieces_from_vcs") def git_pieces_from_vcs(tag_prefix, root, verbose, runner=run_command): """Get version from 'git describe' in the root of the source tree. This only gets called if the git-archive 'subst' keywords were *not* expanded, and _version.py hasn't already been rewritten with a short version string, meaning we're inside a checked out source tree. 
""" GITS = ["git"] TAG_PREFIX_REGEX = "*" if sys.platform == "win32": GITS = ["git.cmd", "git.exe"] TAG_PREFIX_REGEX = r"\*" _, rc = runner( GITS, ["rev-parse", "--git-dir"], cwd=root, hide_stderr=True ) if rc != 0: if verbose: print("Directory %s not under git control" % root) raise NotThisMethod("'git rev-parse --git-dir' returned error") # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty] # if there isn't one, this yields HEX[-dirty] (no NUM) describe_out, rc = runner( GITS, [ "describe", "--tags", "--dirty", "--always", "--long", "--match", "%s%s" % (tag_prefix, TAG_PREFIX_REGEX), ], cwd=root, ) # --long was added in git-1.5.5 if describe_out is None: raise NotThisMethod("'git describe' failed") describe_out = describe_out.strip() full_out, rc = runner(GITS, ["rev-parse", "HEAD"], cwd=root) if full_out is None: raise NotThisMethod("'git rev-parse' failed") full_out = full_out.strip() pieces = {} pieces["long"] = full_out pieces["short"] = full_out[:7] # maybe improved later pieces["error"] = None branch_name, rc = runner( GITS, ["rev-parse", "--abbrev-ref", "HEAD"], cwd=root ) # --abbrev-ref was added in git-1.6.3 if rc != 0 or branch_name is None: raise NotThisMethod("'git rev-parse --abbrev-ref' returned error") branch_name = branch_name.strip() if branch_name == "HEAD": # If we aren't exactly on a branch, pick a branch which represents # the current commit. If all else fails, we are on a branchless # commit. branches, rc = runner(GITS, ["branch", "--contains"], cwd=root) # --contains was added in git-1.5.4 if rc != 0 or branches is None: raise NotThisMethod("'git branch --contains' returned error") branches = branches.split("\n") # Remove the first line if we're running detached if "(" in branches[0]: branches.pop(0) # Strip off the leading "* " from the list of branches. branches = [branch[2:] for branch in branches] if "master" in branches: branch_name = "master" elif not branches: branch_name = None else: # Pick the first branch that is returned. Good or bad. branch_name = branches[0] pieces["branch"] = branch_name # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] # TAG might have hyphens. git_describe = describe_out # look for -dirty suffix dirty = git_describe.endswith("-dirty") pieces["dirty"] = dirty if dirty: git_describe = git_describe[: git_describe.rindex("-dirty")] # now we have TAG-NUM-gHEX or HEX if "-" in git_describe: # TAG-NUM-gHEX mo = re.search(r"^(.+)-(\d+)-g([0-9a-f]+)$", git_describe) if not mo: # unparsable. Maybe git-describe is misbehaving? pieces["error"] = ( "unable to parse git-describe output: '%s'" % describe_out ) return pieces # tag full_tag = mo.group(1) if not full_tag.startswith(tag_prefix): if verbose: fmt = "tag '%s' doesn't start with prefix '%s'" print(fmt % (full_tag, tag_prefix)) pieces["error"] = "tag '%s' doesn't start with prefix '%s'" % ( full_tag, tag_prefix, ) return pieces pieces["closest-tag"] = full_tag[len(tag_prefix) :] # distance: number of commits since tag pieces["distance"] = int(mo.group(2)) # commit: short hex revision ID pieces["short"] = mo.group(3) else: # HEX: no tags pieces["closest-tag"] = None count_out, rc = runner(GITS, ["rev-list", "HEAD", "--count"], cwd=root) pieces["distance"] = int(count_out) # total number of commits # commit date: see ISO-8601 comment in git_versions_from_keywords() date = runner(GITS, ["show", "-s", "--format=%ci", "HEAD"], cwd=root)[ 0 ].strip() # Use only the last line. Previous lines may contain GPG signature # information. 
date = date.splitlines()[-1] pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1) return pieces def do_vcs_install(manifest_in, versionfile_source, ipy): """Git-specific installation logic for Versioneer. For Git, this means creating/changing .gitattributes to mark _version.py for export-subst keyword substitution. """ GITS = ["git"] if sys.platform == "win32": GITS = ["git.cmd", "git.exe"] files = [manifest_in, versionfile_source] if ipy: files.append(ipy) try: my_path = __file__ if my_path.endswith(".pyc") or my_path.endswith(".pyo"): my_path = os.path.splitext(my_path)[0] + ".py" versioneer_file = os.path.relpath(my_path) except NameError: versioneer_file = "versioneer.py" files.append(versioneer_file) present = False try: with open(".gitattributes", "r") as fobj: for line in fobj: if line.strip().startswith(versionfile_source): if "export-subst" in line.strip().split()[1:]: present = True break except OSError: pass if not present: with open(".gitattributes", "a+") as fobj: fobj.write("{0} export-subst\n".format(versionfile_source)) files.append(".gitattributes") run_command(GITS, ["add", "--"] + files) def versions_from_parentdir(parentdir_prefix, root, verbose): """Try to determine the version from the parent directory name. Source tarballs conventionally unpack into a directory that includes both the project name and a version string. We will also support searching up two directory levels for an appropriately named parent directory """ rootdirs = [] for _ in range(3): dirname = os.path.basename(root) if dirname.startswith(parentdir_prefix): return { "version": dirname[len(parentdir_prefix) :], "full-revisionid": None, "dirty": False, "error": None, "date": None, } rootdirs.append(root) root = os.path.dirname(root) # up a level if verbose: print( "Tried directories %s but none started with prefix %s" % (str(rootdirs), parentdir_prefix) ) raise NotThisMethod("rootdir doesn't start with parentdir_prefix") SHORT_VERSION_PY = """ # This file was generated by 'versioneer.py' (0.21) from # revision-control system data, or from the parent directory name of an # unpacked source archive. Distribution tarballs contain a pre-generated copy # of this file. import json version_json = ''' %s ''' # END VERSION_JSON def get_versions(): return json.loads(version_json) """ def versions_from_file(filename): """Try to determine the version from _version.py if present.""" try: with open(filename) as f: contents = f.read() except OSError: raise NotThisMethod("unable to read _version.py") mo = re.search( r"version_json = '''\n(.*)''' # END VERSION_JSON", contents, re.M | re.S, ) if not mo: mo = re.search( r"version_json = '''\r\n(.*)''' # END VERSION_JSON", contents, re.M | re.S, ) if not mo: raise NotThisMethod("no version_json in _version.py") return json.loads(mo.group(1)) def write_to_version_file(filename, versions): """Write the given version number to the given _version.py file.""" os.unlink(filename) contents = json.dumps( versions, sort_keys=True, indent=1, separators=(",", ": ") ) with open(filename, "w") as f: f.write(SHORT_VERSION_PY % contents) print("set %s to '%s'" % (filename, versions["version"])) def plus_or_dot(pieces): """Return a + if we don't already have one, else return a .""" if "+" in pieces.get("closest-tag", ""): return "." return "+" def render_pep440(pieces): """Build up version string, with post-release "local version identifier". Our goal: TAG[+DISTANCE.gHEX[.dirty]] . 
Note that if you get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty Exceptions: 1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty] """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"] or pieces["dirty"]: rendered += plus_or_dot(pieces) rendered += "%d.g%s" % (pieces["distance"], pieces["short"]) if pieces["dirty"]: rendered += ".dirty" else: # exception #1 rendered = "0+untagged.%d.g%s" % (pieces["distance"], pieces["short"]) if pieces["dirty"]: rendered += ".dirty" return rendered def render_pep440_branch(pieces): """TAG[[.dev0]+DISTANCE.gHEX[.dirty]] . The ".dev0" means not master branch. Note that .dev0 sorts backwards (a feature branch will appear "older" than the master branch). Exceptions: 1: no tags. 0[.dev0]+untagged.DISTANCE.gHEX[.dirty] """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"] or pieces["dirty"]: if pieces["branch"] != "master": rendered += ".dev0" rendered += plus_or_dot(pieces) rendered += "%d.g%s" % (pieces["distance"], pieces["short"]) if pieces["dirty"]: rendered += ".dirty" else: # exception #1 rendered = "0" if pieces["branch"] != "master": rendered += ".dev0" rendered += "+untagged.%d.g%s" % (pieces["distance"], pieces["short"]) if pieces["dirty"]: rendered += ".dirty" return rendered def pep440_split_post(ver): """Split pep440 version string at the post-release segment. Returns the release segments before the post-release and the post-release version number (or -1 if no post-release segment is present). """ vc = str.split(ver, ".post") return vc[0], int(vc[1] or 0) if len(vc) == 2 else None def render_pep440_pre(pieces): """TAG[.postN.devDISTANCE] -- No -dirty. Exceptions: 1: no tags. 0.post0.devDISTANCE """ if pieces["closest-tag"]: if pieces["distance"]: # update the post release segment tag_version, post_version = pep440_split_post( pieces["closest-tag"] ) rendered = tag_version if post_version is not None: rendered += ".post%d.dev%d" % ( post_version + 1, pieces["distance"], ) else: rendered += ".post0.dev%d" % (pieces["distance"]) else: # no commits, use the tag as the version rendered = pieces["closest-tag"] else: # exception #1 rendered = "0.post0.dev%d" % pieces["distance"] return rendered def render_pep440_post(pieces): """TAG[.postDISTANCE[.dev0]+gHEX] . The ".dev0" means dirty. Note that .dev0 sorts backwards (a dirty tree will appear "older" than the corresponding clean one), but you shouldn't be releasing software with -dirty anyways. Exceptions: 1: no tags. 0.postDISTANCE[.dev0] """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"] or pieces["dirty"]: rendered += ".post%d" % pieces["distance"] if pieces["dirty"]: rendered += ".dev0" rendered += plus_or_dot(pieces) rendered += "g%s" % pieces["short"] else: # exception #1 rendered = "0.post%d" % pieces["distance"] if pieces["dirty"]: rendered += ".dev0" rendered += "+g%s" % pieces["short"] return rendered def render_pep440_post_branch(pieces): """TAG[.postDISTANCE[.dev0]+gHEX[.dirty]] . The ".dev0" means not master branch. Exceptions: 1: no tags. 
0.postDISTANCE[.dev0]+gHEX[.dirty] """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"] or pieces["dirty"]: rendered += ".post%d" % pieces["distance"] if pieces["branch"] != "master": rendered += ".dev0" rendered += plus_or_dot(pieces) rendered += "g%s" % pieces["short"] if pieces["dirty"]: rendered += ".dirty" else: # exception #1 rendered = "0.post%d" % pieces["distance"] if pieces["branch"] != "master": rendered += ".dev0" rendered += "+g%s" % pieces["short"] if pieces["dirty"]: rendered += ".dirty" return rendered def render_pep440_old(pieces): """TAG[.postDISTANCE[.dev0]] . The ".dev0" means dirty. Exceptions: 1: no tags. 0.postDISTANCE[.dev0] """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"] or pieces["dirty"]: rendered += ".post%d" % pieces["distance"] if pieces["dirty"]: rendered += ".dev0" else: # exception #1 rendered = "0.post%d" % pieces["distance"] if pieces["dirty"]: rendered += ".dev0" return rendered def render_git_describe(pieces): """TAG[-DISTANCE-gHEX][-dirty]. Like 'git describe --tags --dirty --always'. Exceptions: 1: no tags. HEX[-dirty] (note: no 'g' prefix) """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"]: rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) else: # exception #1 rendered = pieces["short"] if pieces["dirty"]: rendered += "-dirty" return rendered def render_git_describe_long(pieces): """TAG-DISTANCE-gHEX[-dirty]. Like 'git describe --tags --dirty --always -long'. The distance/hash is unconditional. Exceptions: 1: no tags. HEX[-dirty] (note: no 'g' prefix) """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) else: # exception #1 rendered = pieces["short"] if pieces["dirty"]: rendered += "-dirty" return rendered def render(pieces, style): """Render the given version pieces into the requested style.""" if pieces["error"]: return { "version": "unknown", "full-revisionid": pieces.get("long"), "dirty": None, "error": pieces["error"], "date": None, } if not style or style == "default": style = "pep440" # the default if style == "pep440": rendered = render_pep440(pieces) elif style == "pep440-branch": rendered = render_pep440_branch(pieces) elif style == "pep440-pre": rendered = render_pep440_pre(pieces) elif style == "pep440-post": rendered = render_pep440_post(pieces) elif style == "pep440-post-branch": rendered = render_pep440_post_branch(pieces) elif style == "pep440-old": rendered = render_pep440_old(pieces) elif style == "git-describe": rendered = render_git_describe(pieces) elif style == "git-describe-long": rendered = render_git_describe_long(pieces) else: raise ValueError("unknown style '%s'" % style) return { "version": rendered, "full-revisionid": pieces["long"], "dirty": pieces["dirty"], "error": None, "date": pieces.get("date"), } class VersioneerBadRootError(Exception): """The project root directory is unknown or missing key files.""" def get_versions(verbose=False): """Get the project version from whatever source is available. Returns dict with two keys: 'version' and 'full'. 
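    (Note: in this version of the file the returned dict actually has
    'version', 'full-revisionid', 'dirty', 'error' and 'date' keys, as
    described near the top of this file.)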
""" if "versioneer" in sys.modules: # see the discussion in cmdclass.py:get_cmdclass() del sys.modules["versioneer"] root = get_root() cfg = get_config_from_root(root) assert cfg.VCS is not None, "please set [versioneer]VCS= in setup.cfg" handlers = HANDLERS.get(cfg.VCS) assert handlers, "unrecognized VCS '%s'" % cfg.VCS verbose = verbose or cfg.verbose assert ( cfg.versionfile_source is not None ), "please set versioneer.versionfile_source" assert cfg.tag_prefix is not None, "please set versioneer.tag_prefix" versionfile_abs = os.path.join(root, cfg.versionfile_source) # extract version from first of: _version.py, VCS command (e.g. 'git # describe'), parentdir. This is meant to work for developers using a # source checkout, for users of a tarball created by 'setup.py sdist', # and for users of a tarball/zipball created by 'git archive' or github's # download-from-tag feature or the equivalent in other VCSes. get_keywords_f = handlers.get("get_keywords") from_keywords_f = handlers.get("keywords") if get_keywords_f and from_keywords_f: try: keywords = get_keywords_f(versionfile_abs) ver = from_keywords_f(keywords, cfg.tag_prefix, verbose) if verbose: print("got version from expanded keyword %s" % ver) return ver except NotThisMethod: pass try: ver = versions_from_file(versionfile_abs) if verbose: print("got version from file %s %s" % (versionfile_abs, ver)) return ver except NotThisMethod: pass from_vcs_f = handlers.get("pieces_from_vcs") if from_vcs_f: try: pieces = from_vcs_f(cfg.tag_prefix, root, verbose) ver = render(pieces, cfg.style) if verbose: print("got version from VCS %s" % ver) return ver except NotThisMethod: pass try: if cfg.parentdir_prefix: ver = versions_from_parentdir(cfg.parentdir_prefix, root, verbose) if verbose: print("got version from parentdir %s" % ver) return ver except NotThisMethod: pass if verbose: print("unable to compute version") return { "version": "0+unknown", "full-revisionid": None, "dirty": None, "error": "unable to compute version", "date": None, } def get_version(): """Get the short version string for this project.""" return get_versions()["version"] def get_cmdclass(cmdclass=None): """Get the custom setuptools/distutils subclasses used by Versioneer. If the package uses a different cmdclass (e.g. one from numpy), it should be provide as an argument. """ if "versioneer" in sys.modules: del sys.modules["versioneer"] # this fixes the "python setup.py develop" case (also 'install' and # 'easy_install .'), in which subdependencies of the main project are # built (using setup.py bdist_egg) in the same python process. Assume # a main project A and a dependency B, which use different versions # of Versioneer. A's setup.py imports A's Versioneer, leaving it in # sys.modules by the time B's setup.py is executed, causing B to run # with the wrong versioneer. Setuptools wraps the sub-dep builds in a # sandbox that restores sys.modules to it's pre-build state, so the # parent is protected against the child's "import versioneer". By # removing ourselves from sys.modules here, before the child build # happens, we protect the child from the parent's versioneer too. 
# Also see https://github.com/python-versioneer/python-versioneer/issues/52 cmds = {} if cmdclass is None else cmdclass.copy() # we add "version" to both distutils and setuptools from distutils.core import Command class cmd_version(Command): description = "report generated version string" user_options = [] boolean_options = [] def initialize_options(self): pass def finalize_options(self): pass def run(self): vers = get_versions(verbose=True) print("Version: %s" % vers["version"]) print(" full-revisionid: %s" % vers.get("full-revisionid")) print(" dirty: %s" % vers.get("dirty")) print(" date: %s" % vers.get("date")) if vers["error"]: print(" error: %s" % vers["error"]) cmds["version"] = cmd_version # we override "build_py" in both distutils and setuptools # # most invocation pathways end up running build_py: # distutils/build -> build_py # distutils/install -> distutils/build ->.. # setuptools/bdist_wheel -> distutils/install ->.. # setuptools/bdist_egg -> distutils/install_lib -> build_py # setuptools/install -> bdist_egg ->.. # setuptools/develop -> ? # pip install: # copies source tree to a tempdir before running egg_info/etc # if .git isn't copied too, 'git describe' will fail # then does setup.py bdist_wheel, or sometimes setup.py install # setup.py egg_info -> ? # we override different "build_py" commands for both environments if "build_py" in cmds: _build_py = cmds["build_py"] elif "setuptools" in sys.modules: from setuptools.command.build_py import build_py as _build_py else: from distutils.command.build_py import build_py as _build_py class cmd_build_py(_build_py): def run(self): root = get_root() cfg = get_config_from_root(root) versions = get_versions() _build_py.run(self) # now locate _version.py in the new build/ directory and replace # it with an updated value if cfg.versionfile_build: target_versionfile = os.path.join( self.build_lib, cfg.versionfile_build ) print("UPDATING %s" % target_versionfile) write_to_version_file(target_versionfile, versions) cmds["build_py"] = cmd_build_py if "build_ext" in cmds: _build_ext = cmds["build_ext"] elif "setuptools" in sys.modules: from setuptools.command.build_ext import build_ext as _build_ext else: from distutils.command.build_ext import build_ext as _build_ext class cmd_build_ext(_build_ext): def run(self): root = get_root() cfg = get_config_from_root(root) versions = get_versions() _build_ext.run(self) if self.inplace: # build_ext --inplace will only build extensions in # build/lib<..> dir with no _version.py to write to. # As in place builds will already have a _version.py # in the module dir, we do not need to write one. return # now locate _version.py in the new build/ directory and replace # it with an updated value target_versionfile = os.path.join( self.build_lib, cfg.versionfile_build ) print("UPDATING %s" % target_versionfile) write_to_version_file(target_versionfile, versions) cmds["build_ext"] = cmd_build_ext if "cx_Freeze" in sys.modules: # cx_freeze enabled? from cx_Freeze.dist import build_exe as _build_exe # nczeczulin reports that py2exe won't like the pep440-style string # as FILEVERSION, but it can be used for PRODUCTVERSION, e.g. # setup(console=[{ # "version": versioneer.get_version().split("+", 1)[0], # FILEVERSION # "product_version": versioneer.get_version(), # ... 
class cmd_build_exe(_build_exe): def run(self): root = get_root() cfg = get_config_from_root(root) versions = get_versions() target_versionfile = cfg.versionfile_source print("UPDATING %s" % target_versionfile) write_to_version_file(target_versionfile, versions) _build_exe.run(self) os.unlink(target_versionfile) with open(cfg.versionfile_source, "w") as f: LONG = LONG_VERSION_PY[cfg.VCS] f.write( LONG % { "DOLLAR": "$", "STYLE": cfg.style, "TAG_PREFIX": cfg.tag_prefix, "PARENTDIR_PREFIX": cfg.parentdir_prefix, "VERSIONFILE_SOURCE": cfg.versionfile_source, } ) cmds["build_exe"] = cmd_build_exe del cmds["build_py"] if "py2exe" in sys.modules: # py2exe enabled? from py2exe.distutils_buildexe import py2exe as _py2exe class cmd_py2exe(_py2exe): def run(self): root = get_root() cfg = get_config_from_root(root) versions = get_versions() target_versionfile = cfg.versionfile_source print("UPDATING %s" % target_versionfile) write_to_version_file(target_versionfile, versions) _py2exe.run(self) os.unlink(target_versionfile) with open(cfg.versionfile_source, "w") as f: LONG = LONG_VERSION_PY[cfg.VCS] f.write( LONG % { "DOLLAR": "$", "STYLE": cfg.style, "TAG_PREFIX": cfg.tag_prefix, "PARENTDIR_PREFIX": cfg.parentdir_prefix, "VERSIONFILE_SOURCE": cfg.versionfile_source, } ) cmds["py2exe"] = cmd_py2exe # we override different "sdist" commands for both environments if "sdist" in cmds: _sdist = cmds["sdist"] elif "setuptools" in sys.modules: from setuptools.command.sdist import sdist as _sdist else: from distutils.command.sdist import sdist as _sdist class cmd_sdist(_sdist): def run(self): versions = get_versions() self._versioneer_generated_versions = versions # unless we update this, the command will keep using the old # version self.distribution.metadata.version = versions["version"] return _sdist.run(self) def make_release_tree(self, base_dir, files): root = get_root() cfg = get_config_from_root(root) _sdist.make_release_tree(self, base_dir, files) # now locate _version.py in the new base_dir directory # (remembering that it may be a hardlink) and replace it with an # updated value target_versionfile = os.path.join(base_dir, cfg.versionfile_source) print("UPDATING %s" % target_versionfile) write_to_version_file( target_versionfile, self._versioneer_generated_versions ) cmds["sdist"] = cmd_sdist return cmds CONFIG_ERROR = """ setup.cfg is missing the necessary Versioneer configuration. You need a section like: [versioneer] VCS = git style = pep440 versionfile_source = src/myproject/_version.py versionfile_build = myproject/_version.py tag_prefix = parentdir_prefix = myproject- You will also need to edit your setup.py to use the results: import versioneer setup(version=versioneer.get_version(), cmdclass=versioneer.get_cmdclass(), ...) Please read the docstring in ./versioneer.py for configuration instructions, edit setup.cfg, and re-run the installer or 'python versioneer.py setup'. """ SAMPLE_CONFIG = """ # See the docstring in versioneer.py for instructions. Note that you must # re-run 'versioneer.py setup' after changing this section, and commit the # resulting files. [versioneer] #VCS = git #style = pep440 #versionfile_source = #versionfile_build = #tag_prefix = #parentdir_prefix = """ OLD_SNIPPET = """ from ._version import get_versions __version__ = get_versions()['version'] del get_versions """ INIT_PY_SNIPPET = """ from . 
import {0} __version__ = {0}.get_versions()['version'] """ def do_setup(): """Do main VCS-independent setup function for installing Versioneer.""" root = get_root() try: cfg = get_config_from_root(root) except ( OSError, configparser.NoSectionError, configparser.NoOptionError, ) as e: if isinstance(e, (OSError, configparser.NoSectionError)): print( "Adding sample versioneer config to setup.cfg", file=sys.stderr ) with open(os.path.join(root, "setup.cfg"), "a") as f: f.write(SAMPLE_CONFIG) print(CONFIG_ERROR, file=sys.stderr) return 1 print(" creating %s" % cfg.versionfile_source) with open(cfg.versionfile_source, "w") as f: LONG = LONG_VERSION_PY[cfg.VCS] f.write( LONG % { "DOLLAR": "$", "STYLE": cfg.style, "TAG_PREFIX": cfg.tag_prefix, "PARENTDIR_PREFIX": cfg.parentdir_prefix, "VERSIONFILE_SOURCE": cfg.versionfile_source, } ) ipy = os.path.join(os.path.dirname(cfg.versionfile_source), "__init__.py") if os.path.exists(ipy): try: with open(ipy, "r") as f: old = f.read() except OSError: old = "" module = os.path.splitext(os.path.basename(cfg.versionfile_source))[0] snippet = INIT_PY_SNIPPET.format(module) if OLD_SNIPPET in old: print(" replacing boilerplate in %s" % ipy) with open(ipy, "w") as f: f.write(old.replace(OLD_SNIPPET, snippet)) elif snippet not in old: print(" appending to %s" % ipy) with open(ipy, "a") as f: f.write(snippet) else: print(" %s unmodified" % ipy) else: print(" %s doesn't exist, ok" % ipy) ipy = None # Make sure both the top-level "versioneer.py" and versionfile_source # (PKG/_version.py, used by runtime code) are in MANIFEST.in, so # they'll be copied into source distributions. Pip won't be able to # install the package without this. manifest_in = os.path.join(root, "MANIFEST.in") simple_includes = set() try: with open(manifest_in, "r") as f: for line in f: if line.startswith("include "): for include in line.split()[1:]: simple_includes.add(include) except OSError: pass # That doesn't cover everything MANIFEST.in can do # (http://docs.python.org/2/distutils/sourcedist.html#commands), so # it might give some false negatives. Appending redundant 'include' # lines is safe, though. if "versioneer.py" not in simple_includes: print(" appending 'versioneer.py' to MANIFEST.in") with open(manifest_in, "a") as f: f.write("include versioneer.py\n") else: print(" 'versioneer.py' already in MANIFEST.in") if cfg.versionfile_source not in simple_includes: print( " appending versionfile_source ('%s') to MANIFEST.in" % cfg.versionfile_source ) with open(manifest_in, "a") as f: f.write("include %s\n" % cfg.versionfile_source) else: print(" versionfile_source already in MANIFEST.in") # Make VCS-specific changes. For git, this means creating/changing # .gitattributes to mark _version.py for export-subst keyword # substitution. do_vcs_install(manifest_in, cfg.versionfile_source, ipy) return 0 def scan_setup_py(): """Validate the contents of setup.py against Versioneer's expectations.""" found = set() setters = False errors = 0 with open("setup.py", "r") as f: for line in f.readlines(): if "import versioneer" in line: found.add("import") if "versioneer.get_cmdclass()" in line: found.add("cmdclass") if "versioneer.get_version()" in line: found.add("get_version") if "versioneer.VCS" in line: setters = True if "versioneer.versionfile_source" in line: setters = True if len(found) != 3: print("") print("Your setup.py appears to be missing some important items") print("(but I might be wrong). 
Please make sure it has something") print("roughly like the following:") print("") print(" import versioneer") print(" setup( version=versioneer.get_version(),") print(" cmdclass=versioneer.get_cmdclass(), ...)") print("") errors += 1 if setters: print("You should remove lines like 'versioneer.VCS = ' and") print("'versioneer.versionfile_source = ' . This configuration") print("now lives in setup.cfg, and should be removed from setup.py") print("") errors += 1 return errors if __name__ == "__main__": cmd = sys.argv[1] if cmd == "setup": errors = do_setup() errors += scan_setup_py() if errors: sys.exit(1)