diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml new file mode 100644 index 00000000..b1d4a1f6 --- /dev/null +++ b/.github/workflows/ci.yml @@ -0,0 +1,689 @@ +name: GitHub CI + +on: + push: + branches: + - master + pull_request: + +jobs: + test: + runs-on: ${{ matrix.os }} + container: ${{ matrix.container }} + strategy: + fail-fast: false + matrix: + include: + - name: py2.6 + os: ubuntu-latest + container: centos:6 + python-version: 2.6 + - name: py3.10 with ossl3.0 + os: ubuntu-latest + container: ubuntu:22.04 + python-version: "3.10" + tox-env: py310 + - name: py2.7 + os: ubuntu-20.04 + python-version: 2.7 + tox-env: py27 + - name: py2.7 with old gmpy + os: ubuntu-20.04 + python-version: 2.7 + tox-env: py27_old_gmpy + - name: py2.7 with old gmpy2 + os: ubuntu-20.04 + python-version: 2.7 + tox-env: py27_old_gmpy2 + - name: py2.7 with old six + os: ubuntu-20.04 + python-version: 2.7 + tox-env: py27_old_six + - name: py2.7 with gmpy + os: ubuntu-20.04 + python-version: 2.7 + tox-env: gmpypy27 + - name: py2.7 with gmpy2 + os: ubuntu-20.04 + python-version: 2.7 + tox-env: gmpy2py27 + - name: py3.6 + os: ubuntu-20.04 + python-version: 3.6 + tox-env: py36 + - name: py3.7 + os: ubuntu-22.04 + python-version: 3.7 + tox-env: py37 + - name: py3.8 + os: ubuntu-latest + python-version: 3.8 + tox-env: py38 + - name: py3.9 + os: ubuntu-latest + python-version: 3.9 + tox-env: py39 + - name: py3.10 + os: ubuntu-latest + python-version: '3.10' + tox-env: py310 + - name: py3.10 with gmpy + os: ubuntu-latest + python-version: '3.10' + tox-env: gmpypy310 + - name: py3.10 with gmpy2 + os: ubuntu-latest + python-version: '3.10' + tox-env: gmpy2py310 + - name: py3.11 + os: ubuntu-latest + python-version: '3.11' + tox-env: py311 + - name: py3.12 + os: ubuntu-latest + python-version: '3.12' + tox-env: py312 + - name: py3.13 + os: ubuntu-latest + python-version: '3.13' + tox-env: py313 + - name: pypy + os: ubuntu-latest + python-version: pypy-2.7 + tox-env: pypy + - name: pypy3 + os: ubuntu-22.04 + python-version: pypy-3.7 + tox-env: pypy3 + # special configurations + - name: py2.7 with instrumental + os: ubuntu-20.04 + python-version: 2.7 + opt-deps: ['instrumental'] + - name: code checks + os: ubuntu-latest + python-version: 3.9 + tox-env: codechecks + - name: mutation testing + os: ubuntu-latest + python-version: '3.11' + mutation: 'true' + steps: + - uses: actions/checkout@v2 + if: ${{ !matrix.container }} + with: + fetch-depth: 50 + - uses: actions/checkout@v1 + # centos 6 doesn't have glibc new enough for the nodejs used by v2 + if: ${{ matrix.container }} + with: + fetch-depth: 50 + - name: Ensure dependencies on CentOS + if: ${{ matrix.container == 'centos:6' }} + run: | + ls /etc/yum.repos.d/ + cat /etc/yum.repos.d/CentOS-Base.repo + rm /etc/yum.repos.d/CentOS-Base.repo + cat > /etc/yum.repos.d/CentOS-Base.repo < cosmic-ray.html + - name: Archive mutation testing results + if: ${{ matrix.mutation == 'true' && github.event.pull_request }} + uses: actions/upload-artifact@v4 + with: + name: mutation-PR-coverage-report + path: cosmic-ray.html + - name: Check test coverage for PR + if: ${{ matrix.mutation == 'true' && github.event.pull_request }} + run: | + # check if executed have at most 50% survival rate + cr-rate --estimate --confidence 99.9 --fail-over 50 session-vs-master.sqlite + - name: instrumental test coverage on PR + if: ${{ contains(matrix.opt-deps, 'instrumental') && github.event.pull_request }} + env: + BASE_REF: ${{ github.event.pull_request.base.ref }} + run: | + git fetch 
origin $BASE_REF + MERGE_BASE=$(git merge-base origin/$BASE_REF HEAD) + echo "MERGE_BASE:" $MERGE_BASE + git checkout $MERGE_BASE + instrumental -t ecdsa -i '.*test_.*|.*_version|.*_compat|.*_sha3' `which pytest` src/ecdsa/test*.py + instrumental -f .instrumental.cov -s + instrumental -f .instrumental.cov -s | python diff-instrumental.py --save .diff-instrumental + git checkout $GITHUB_SHA + instrumental -t ecdsa -i '.*test_.*|.*_version|.*_compat|.*_sha3' `which pytest` src/ecdsa/test*.py + instrumental -f .instrumental.cov -sr + instrumental -f .instrumental.cov -s | python diff-instrumental.py --read .diff-instrumental --fail-under 70 --max-difference -0.1 + - name: instrumental test coverage on push + if: ${{ contains(matrix.opt-deps, 'instrumental') && !github.event.pull_request }} + run: | + instrumental -t ecdsa -i '.*test_.*|.*_version|.*_compat|.*_sha3' `which pytest` src/ecdsa + instrumental -f .instrumental.cov -s + # just log the values when merging + instrumental -f .instrumental.cov -s | python diff-instrumental.py + echo "COND_COV=$(instrumental -f .instrumental.cov -s | python diff-instrumental.py --raw)" >> $GITHUB_ENV + - name: Create condition coverage badge + uses: schneegans/dynamic-badges-action@v1.4.0 + if: ${{ contains(matrix.opt-deps, 'instrumental') && !github.event.pull_request }} + with: + auth: ${{ secrets.GIST_SECRET }} + gistID: 9b6ca1f3410207fbeca785a178781651 + filename: python-ecdsa-condition-coverage.json + label: condition coverage + message: ${{ env.COND_COV }}% + valColorRange: ${{ env.COND_COV }} + maxColorRange: 100 + minColorRange: 0 + - name: Publish coverage to Coveralls + if: ${{ !matrix.opt-deps && matrix.tox-env != 'codechecks' }} + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + COVERALLS_FLAG_NAME: ${{ matrix.name }} + COVERALLS_PARALLEL: true + COVERALLS_SERVICE_NAME: github + PY_VERSION: ${{ matrix.python-version }} + run: | + if [[ $PY_VERSION == "2.6" ]]; then + COVERALLS_SKIP_SSL_VERIFY=1 coveralls + else + coveralls + fi + + coveralls: + name: Indicate completion to coveralls.io + needs: test + runs-on: ubuntu-latest + container: python:3-slim + steps: + - name: Install coveralls + run: | + pip3 install --upgrade coveralls + - name: Send "finished" signal to coveralls + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + COVERALLS_SERVICE_NAME: github + run: | + coveralls --finish + + mutation-prepare: + name: Prepare job files for the mutation runners + # use runner minutes on mutation testing only after the PR passed basic + # testing + needs: coveralls + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + if: ${{ !matrix.container }} + with: + fetch-depth: 50 + - name: save session objects + uses: actions/cache@v3 + with: + path: | + sessions/ + key: sessions-${{ github.sha }} + - name: Install cosmic-ray + run: | + pip3 install cosmic-ray + pip install pytest-timeout + - name: Install dependencies + run: | + sudo apt-get install -y sqlite3 + - name: Display Python version + run: python -c "import sys; print(sys.version)" + - name: Create list of mutations + run: | + cosmic-ray init cosmic-ray.toml session.sqlite + - name: Log number of jobs created + run: | + cr-report session.sqlite | tail -n 3 + - name: Split up mutations to workers + run: | + cp session.sqlite session-to_del.sqlite + sqlite3 session-to_del.sqlite "$(cat sql/create_to_del.sql)" + mkdir sessions + for i in $(seq 0 19); do + sed "s/%SHARD%/$i/" < sql/shard-db.sql > shard.sql + cp session-to_del.sqlite session-$i.sqlite + sqlite3 session-$i.sqlite 
"$(cat shard.sql)" + mv session-$i.sqlite sessions/ + done + mutation-execute: + name: Execute mutation testing + needs: mutation-prepare + runs-on: ubuntu-latest + strategy: + fail-fast: false + matrix: + include: + - name: 0 + - name: 1 + - name: 2 + - name: 3 + - name: 4 + - name: 5 + - name: 6 + - name: 7 + - name: 8 + - name: 9 + - name: 10 + - name: 11 + - name: 12 + - name: 13 + - name: 14 + - name: 15 + - name: 16 + - name: 17 + - name: 18 + - name: 19 + steps: + - uses: actions/checkout@v2 + if: ${{ !matrix.container }} + with: + fetch-depth: 1 + - name: Session objects + uses: actions/cache@v3 + with: + path: | + sessions/ + key: sessions-${{ github.sha }} + - name: Session done objects + uses: actions/cache@v3 + with: + path: | + sessions-done/session-${{ matrix.name }}-done.sqlite + key: sessions-${{ github.sha }}-${{ matrix.name }}-done + - name: Install gmpy2 dependencies + run: sudo apt-get install -y libmpfr-dev libmpc-dev + - name: Install gmpy2 + run: pip install gmpy2 + - name: Install build dependencies + run: | + pip install -r build-requirements.txt + pip install cosmic-ray + pip install pytest-timeout + - name: Run mutation testing + run: | + cp sessions/session-${{ matrix.name }}.sqlite session.sqlite + systemd-run --user --scope -p MemoryMax=2G -p MemoryHigh=2G cosmic-ray exec cosmic-ray.toml session.sqlite & + cosmic_pid=$! + sleep 1 + for i in $(seq 1 10); do + if ! kill -s 0 $cosmic_pid; then + break + fi + echo $i + sleep 60 + done + kill -s 0 $cosmic_pid && kill $cosmic_pid + mkdir sessions-done/ + cp session.sqlite sessions-done/session-${{ matrix.name }}-done.sqlite + - name: Report executed + run: | + cr-report session.sqlite | tail -n 3 + mutation-combine: + name: Combine mutation testing results + needs: mutation-execute + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + if: ${{ !matrix.container }} + with: + fetch-depth: 1 + - name: Session done 0 objects + uses: actions/cache@v3 + with: + path: | + sessions-done/session-0-done.sqlite + key: sessions-${{ github.sha }}-0-done + - name: Session done 1 objects + uses: actions/cache@v3 + with: + path: | + sessions-done/session-1-done.sqlite + key: sessions-${{ github.sha }}-1-done + - name: Session done 2 objects + uses: actions/cache@v3 + with: + path: | + sessions-done/session-2-done.sqlite + key: sessions-${{ github.sha }}-2-done + - name: Session done 3 objects + uses: actions/cache@v3 + with: + path: | + sessions-done/session-3-done.sqlite + key: sessions-${{ github.sha }}-3-done + - name: Session done 4 objects + uses: actions/cache@v3 + with: + path: | + sessions-done/session-4-done.sqlite + key: sessions-${{ github.sha }}-4-done + - name: Session done 5 objects + uses: actions/cache@v3 + with: + path: | + sessions-done/session-5-done.sqlite + key: sessions-${{ github.sha }}-5-done + - name: Session done 6 objects + uses: actions/cache@v3 + with: + path: | + sessions-done/session-6-done.sqlite + key: sessions-${{ github.sha }}-6-done + - name: Session done 7 objects + uses: actions/cache@v3 + with: + path: | + sessions-done/session-7-done.sqlite + key: sessions-${{ github.sha }}-7-done + - name: Session done 8 objects + uses: actions/cache@v3 + with: + path: | + sessions-done/session-8-done.sqlite + key: sessions-${{ github.sha }}-8-done + - name: Session done 9 objects + uses: actions/cache@v3 + with: + path: | + sessions-done/session-9-done.sqlite + key: sessions-${{ github.sha }}-9-done + - name: Session done 10 objects + uses: actions/cache@v3 + with: + path: | + 
sessions-done/session-10-done.sqlite + key: sessions-${{ github.sha }}-10-done + - name: Session done 11 objects + uses: actions/cache@v3 + with: + path: | + sessions-done/session-11-done.sqlite + key: sessions-${{ github.sha }}-11-done + - name: Session done 12 objects + uses: actions/cache@v3 + with: + path: | + sessions-done/session-12-done.sqlite + key: sessions-${{ github.sha }}-12-done + - name: Session done 13 objects + uses: actions/cache@v3 + with: + path: | + sessions-done/session-13-done.sqlite + key: sessions-${{ github.sha }}-13-done + - name: Session done 14 objects + uses: actions/cache@v3 + with: + path: | + sessions-done/session-14-done.sqlite + key: sessions-${{ github.sha }}-14-done + - name: Session done 15 objects + uses: actions/cache@v3 + with: + path: | + sessions-done/session-15-done.sqlite + key: sessions-${{ github.sha }}-15-done + - name: Session done 16 objects + uses: actions/cache@v3 + with: + path: | + sessions-done/session-16-done.sqlite + key: sessions-${{ github.sha }}-16-done + - name: Session done 17 objects + uses: actions/cache@v3 + with: + path: | + sessions-done/session-17-done.sqlite + key: sessions-${{ github.sha }}-17-done + - name: Session done 18 objects + uses: actions/cache@v3 + with: + path: | + sessions-done/session-18-done.sqlite + key: sessions-${{ github.sha }}-18-done + - name: Session done 19 objects + uses: actions/cache@v3 + with: + path: | + sessions-done/session-19-done.sqlite + key: sessions-${{ github.sha }}-19-done + - name: Install cosmic-ray + run: | + pip3 install cosmic-ray + pip install pytest-timeout + - name: Install dependencies + run: | + sudo apt-get install -y sqlite3 + - name: Combine worker results + run: | + cp sessions-done/session-0-done.sqlite session.sqlite + for i in $(seq 1 19); do + cp sessions-done/session-$i-done.sqlite session-to_merge.sqlite && sqlite3 session.sqlite "$(cat sql/combine.sql)" || true + done + - name: Report executed + run: | + cr-report session.sqlite | tail -n 3 + - name: Generate html report + run: | + cr-html session.sqlite > cosmic-ray.html + - name: Archive mutation testing results + uses: actions/upload-artifact@v4 + with: + name: mutation-coverage-report + path: cosmic-ray.html + - name: Get mutation score + run: | + echo "print('{0:.2f}'.format(100-$(cr-rate session.sqlite)))" > print-score.py + echo "MUT_SCORE=$(python print-score.py)" >> $GITHUB_ENV + - name: Create mutation score badge + if: ${{ !github.event.pull_request }} + uses: schneegans/dynamic-badges-action@v1.4.0 + with: + auth: ${{ secrets.GIST_SECRET }} + gistID: 9b6ca1f3410207fbeca785a178781651 + filename: python-ecdsa-mutation-score.json + label: mutation score + message: ${{ env.MUT_SCORE }}% + valColorRange: ${{ env.MUT_SCORE }} + maxColorRange: 100 + minColorRange: 0 + - name: Check survival estimate + run: cr-rate --estimate --fail-over 32 --confidence 99.9 session.sqlite diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml new file mode 100644 index 00000000..c671375c --- /dev/null +++ b/.github/workflows/codeql.yml @@ -0,0 +1,41 @@ +name: "CodeQL" + +on: + push: + branches: [ "master" ] + pull_request: + branches: [ "master" ] + schedule: + - cron: "59 21 * * 0" + +jobs: + analyze: + name: Analyze + runs-on: ubuntu-latest + permissions: + actions: read + contents: read + security-events: write + + strategy: + fail-fast: false + matrix: + language: [ python ] + + steps: + - name: Checkout + uses: actions/checkout@v3 + + - name: Initialize CodeQL + uses: github/codeql-action/init@v2 + with: 
+ languages: ${{ matrix.language }} + queries: +security-and-quality + + - name: Autobuild + uses: github/codeql-action/autobuild@v2 + + - name: Perform CodeQL Analysis + uses: github/codeql-action/analyze@v2 + with: + category: "/language:${{ matrix.language }}" diff --git a/.gitignore b/.gitignore index 2e8eb6c7..89082f94 100644 --- a/.gitignore +++ b/.gitignore @@ -1,5 +1,6 @@ *.py[cod] MANIFEST +htmlcov # C extensions *.so @@ -49,3 +50,5 @@ t/ # Backup files *.swp *~ +.idea +.cache diff --git a/.gitleaks.toml b/.gitleaks.toml new file mode 100644 index 00000000..89cae0db --- /dev/null +++ b/.gitleaks.toml @@ -0,0 +1,3 @@ +[allowlist] + description = "Ignore private keys in test files" + files = [ '''test_.*''' ] diff --git a/.readthedocs.yaml b/.readthedocs.yaml new file mode 100644 index 00000000..14dd391c --- /dev/null +++ b/.readthedocs.yaml @@ -0,0 +1,26 @@ +# .readthedocs.yaml +# Read the Docs configuration file +# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details + +# Required +version: 2 + +# Set the version of Python and other tools you might need +build: + os: ubuntu-22.04 + tools: + python: "3.11" + +# Build documentation in the docs/ directory with Sphinx +sphinx: + configuration: docs/source/conf.py + +# If using Sphinx, optionally build your docs in additional formats such as PDF +# formats: +# - pdf + +# Optionally declare the Python requirements required to build your docs +python: + install: + - requirements: requirements.txt + - requirements: docs/requirements.txt diff --git a/.travis.yml b/.travis.yml index 05f65d40..8729ca80 100644 --- a/.travis.yml +++ b/.travis.yml @@ -12,11 +12,17 @@ addons: - libmpc-dev before_cache: - rm -f $HOME/.cache/pip/log/debug.log -# place the slowest (instrumental and py2.6) first +# place the slowest (instrumental, mutation and py2.6) first matrix: include: + - python: 3.9 + dist: bionic + sudo: true + env: MUTATION=yes - python: 2.7 env: INSTRUMENTAL=yes + dist: bionic + sudo: true - python: 2.6 env: TOX_ENV=py26 - python: 2.7 @@ -44,19 +50,23 @@ matrix: dist: bionic sudo: true - python: 3.8 + env: TOX_ENV=py38 + dist: bionic + sudo: true + - python: 3.9 env: TOX_ENV=codechecks dist: bionic sudo: true - - python: 3.8 - env: TOX_ENV=py38 + - python: 3.9 + env: TOX_ENV=py39 dist: bionic sudo: true - - python: 3.8 - env: TOX_ENV=gmpypy38 + - python: 3.9 + env: TOX_ENV=gmpypy39 dist: bionic sudo: true - - python: 3.8 - env: TOX_ENV=gmpy2py38 + - python: 3.9 + env: TOX_ENV=gmpy2py39 dist: bionic sudo: true - python: nightly @@ -67,6 +77,24 @@ matrix: env: TOX_ENV=pypy - python: pypy3 env: TOX_ENV=pypy3 + # We use explicit version as the PATH needs major-minor part + - name: "Python3.8.0 on Windows" + os: windows + language: shell + before_install: + - choco install python --version 3.8.0 + - python -m pip install --upgrade pip + env: PATH=/c/Python38:/c/Python38/Scripts:$PATH + install: + - pip list + - pip install six + - pip install -r build-requirements.txt + - pip list + script: + - coverage run --branch -m pytest src/ecdsa + after_success: + - coveralls + allow_failures: - python: nightly @@ -101,30 +129,29 @@ install: else travis_retry pip install -r build-requirements.txt; fi - - if [[ $TOX_ENV =~ gmpy2 ]]; then travis_retry pip install gmpy2; fi + - if [[ $TOX_ENV =~ gmpy2 ]] || [[ $INSTRUMENTAL ]] || [[ $MUTATION ]]; then travis_retry pip install gmpy2; fi - if [[ $TOX_ENV =~ gmpyp ]]; then travis_retry pip install gmpy; fi - if [[ $INSTRUMENTAL ]]; then travis_retry pip install instrumental; fi + - if [[ 
$MUTATION ]]; then travis_retry pip install cosmic-ray; fi - pip list script: - if [[ $TOX_ENV ]]; then tox -e $TOX_ENV; fi - - if [[ $TOX_ENV =~ gmpy2 ]]; then tox -e speedgmpy2; fi - - if [[ $TOX_ENV =~ gmpyp ]]; then tox -e speedgmpy; fi - - if ! [[ $TOX_ENV =~ gmpy ]]; then tox -e speed; fi + - if [[ $TOX_ENV =~ gmpy2 ]] && [[ -z $MUTATION ]]; then tox -e speedgmpy2; fi + - if [[ $TOX_ENV =~ gmpyp ]] && [[ -z $MUTATION ]]; then tox -e speedgmpy; fi + - if ! [[ $TOX_ENV =~ gmpy ]] && [[ -z $MUTATION ]]; then tox -e speed; fi - | if [[ $INSTRUMENTAL && $TRAVIS_PULL_REQUEST != "false" ]]; then git checkout $PR_FIRST^ - # exclude the super slow test_malformed_sigs.py, until #127 is merged - files="$(ls src/ecdsa/test*.py | grep -v test_malformed_sigs.py)" - instrumental -t ecdsa -i 'test.*|.*_version' `which pytest` $files + instrumental -t ecdsa -i 'test.*|.*_version|.*_compat' `which pytest` src/ecdsa/test*.py instrumental -f .instrumental.cov -s instrumental -f .instrumental.cov -s | python diff-instrumental.py --save .diff-instrumental git checkout $BRANCH - instrumental -t ecdsa -i 'test.*|.*_version' `which pytest` src/ecdsa + instrumental -t ecdsa -i 'test.*|.*_version|.*_compat' `which pytest` src/ecdsa/test*.py instrumental -f .instrumental.cov -sr fi - | if [[ $INSTRUMENTAL && $TRAVIS_PULL_REQUEST == "false" ]]; then - instrumental -t ecdsa -i 'test.*|.*_version' `which pytest` src/ecdsa + instrumental -t ecdsa -i 'test.*|.*_version|.*_compat' `which pytest` src/ecdsa instrumental -f .instrumental.cov -s # just log the values when merging instrumental -f .instrumental.cov -s | python diff-instrumental.py @@ -133,6 +160,26 @@ script: if [[ $INSTRUMENTAL && $TRAVIS_PULL_REQUEST != "false" ]]; then instrumental -f .instrumental.cov -s | python diff-instrumental.py --read .diff-instrumental --fail-under 70 --max-difference -0.1 fi + # cosmic-ray (mutation testing) runs + - if [[ $MUTATION ]]; then cosmic-ray init cosmic-ray.toml session.sqlite; fi + - if [[ $MUTATION ]]; then cosmic-ray baseline --report session.sqlite; fi + - if [[ $MUTATION ]]; then cr-report --show-output session.baseline.sqlite; fi + - | + if [[ $MUTATION ]]; then + cosmic-ray exec session.sqlite & + COSMIC_PID=$! + # make sure that we output something every 5 minutes (otherwise travis will kill us) + while kill -s 0 $COSMIC_PID; do + sleep 300 + cr-report session.sqlite | tail -n 3; + done & + REPORT_PID=$! 
+ # kill exec after 40 minutes + (sleep $((60*40)); kill $COSMIC_PID) & + fi + - if [[ $MUTATION ]]; then wait $COSMIC_PID ; kill $REPORT_PID ; true; fi + - if [[ $MUTATION ]]; then cr-report --show-output session.sqlite | tail -n 40; fi + - if [[ $MUTATION ]]; then cr-rate --estimate --fail-over 29 --confidence 99.9 session.sqlite; fi after_success: - - if [[ -z $INSTRUMENTAL ]]; then coveralls; fi + - if [[ -z $INSTRUMENTAL && -z $MUTATION ]]; then coveralls; fi diff --git a/NEWS b/NEWS index e6747e7b..0d066f4b 100644 --- a/NEWS +++ b/NEWS @@ -1,3 +1,189 @@ +* Release 0.19.1 (13 Mar 2025) + +New API: +* ``der.remove_implicit`` and ``der.encode_implicit`` for decoding and + encoding DER IMPLICIT values with custom tag values and arbitrary + classes + +Bug fixes: +* Minor fixes around arithmetic with curves that have non-prime order + (useful for experimentation, not practical deployments) +* Fix arithmetic to work with curves that have (0, 0) on the curve +* Fix canonicalization of signatures when ``s`` is just slightly + above half of curve order + +Maintenance: +* Dropped official support for Python 3.5 (again, issues with CI, support + for Python 2.6 and Python 2.7 is unchanged) +* Officially support Python 3.12 and 3.13 (add them to CI) +* Removal of a few more unnecessary `six.b` literals (Alexandre Detiste) +* Fix typos in warning messages + + +* Release 0.19.0 (08 Apr 2024) + +New API: +* `to_ssh` in `VerifyingKey` and `SigningKey`, supports Ed25519 keys only + (Pablo Mazzini) + +New features: +* Support for twisted Brainpool curves + +Doc fix: +* Fix curve equation in glossary +* Documentation for signature encoding and signature decoding functions + +Maintenance: +* Dropped official support for 3.3 and 3.4 (because of problems running them + in CI, not because it's actually incompatible; support for 2.6 and 2.7 is + unaffected) +* Fixes around hypothesis parameters +* Officially support Python 3.11 and 3.12 +* Small updates to test suite to make it work with 3.11 and 3.12 and new + releases of test dependencies +* Dropped the internal `_rwlock` module as it's unused +* Added mutation testing to CI, lots of speed-ups to the test suite + to make it happen +* Removal of unnecessary `six.b` literals (Alexandre Detiste) + +Deprecations: +* `int_to_string`, `string_to_int`, and `digest_integer` from `ecdsa.ecdsa` + module are now considered deprecated, they will be removed in a future + release + +* Release 0.18.0 (09 Jul 2022) + +New API: +* `curve_by_name` in `curves` module to get a `Curve` object by providing curve + name. + +Bug fix: +* Make the `VerifyingKey` encoded with explicit parameters use the same + kind of point encoding for public key and curve generator. +* Better handling of malformed curve parameters (as in CVE-2022-0778); + make python-ecdsa raise `MalformedPointError` instead of `AssertionError`. + +Doc fix: +* Publish the documentation on https://ecdsa.readthedocs.io/, + include explanation of basics of handling of ECC data formats and how to use + the library for elliptic curve arithmetic. +* Make object names more consistent, make them into hyperlinks on the + readthedocs documentation. +* Make security note more explicit (Ian Rodney) +* Fix the `explicit` vs `named_curve` confusion in `VerifyingKey` docs. + +Maintenance: +* Updated black version; slight changes to formatting +* Include interoperability tests for Ed25519 and Ed448 with OpenSSL. + +* Release 0.18.0-beta2 (05 Jan 2022) + +New features: +* Support for point precomputation for EdDSA. 
+ +Maintenance: +* Fix few typos (thanks to Tim Gates and Kian Meng Ang). + +Bug fix: +* Accept private EdDSA keys that include public key in the ASN.1 structure. +* Fix incompatibility with Python 3.3 in handling of memoryviews of empty + strings. + +* Release 0.18.0-beta1 (03 Aug 2021) + +New features: +* Support for EdDSA (Ed25519, Ed448) signature creation and verification. +* Support for Ed25519 and Ed448 in PKCS#8 and public key files. + +New API: +* CurveEdTw class to represent the Twisted Edwards curve parameters. +* PointEdwards class to represent points on Twisted Edwards curve and + provide point arithmetic on it. + +* Release 0.17.0 (27 May 2021) + +New API: +* Keys that use explicit curve parameters can now be read and written. + Reading of explicit curves can be disabled by using the + `valid_curve_encodings` keyword argument in `VerifyingKey.from_pem()`, + `VerifyingKey.from_der()`, `SigningKey.from_pem()`, and + `SigningKey.from_der()`. +* Keys can now be written with use of explicit curve parameters, + use `curve_parameters_encoding` keyword argument of `VerifyingKey.to_pem()`, + `VerifyingKey.to_der()`, `SigningKey.to_pem(), or `SigningKey.to_der()` to + specify the format. By default `named_curve` will be used, unless the + curve doesn't have an associated OID (as will be the case for an unsupported + curve), then `explicit` encoding will be used. +* Allow specifying acceptable point formats when loading public keys + (this also fixes a minor bug where python-ecdsa would accept raw + encoding for points in PKCS#8 files). Set of accepted encodings is controlled + by `valid_encodings` keyword argument in + `ECDH.load_received_public_key_bytes()`, `VerifyingKey.from_string()`, + `VerifyingKey.from_pem()`, VerifyingKey.from_der()`. +* `PointJacobi` and `Point` now inherit from `AbstractPoint` that implements + the methods for parsing points. That added `from_bytes()` and + `to_bytes()` methods to both of them. +* Curve parameters can now be read and written to PEM and DER files. The + `Curve` class supports new `to_der()`, `from_der()`, `to_pem()`, and + `from_pem()` methods. + +Doc fix: +* Describe in detail which methods can raise `RSZeroError`, and that + `SigningKey.sign_deterministic()` won't raise it. + +Bug fix: +* Correctly truncate hash values larger than the curve order (only impacted + custom curves and the curves added in this release). +* Correctly handle curves for which the order is larger than the prime + (only impacted custom curves and the secp160r1 curve added in this release). +* Fix the handling of `==` and `!=` for `Public_key`, `Private_key`, `Point`, + `PointJacobi`, `VerifyingKey`, and `SigningKey` so that it behaves + consistently and in the expected way both in Python 2 and Python 3. +* Implement lock-less algorithm inside PointJacobi for keeping shared state + so that when a calculation is aborted with KeyboardInterrupt, the state + doesn't become corrupted (this fixes the occasional breakage of ecdsa in + interactive shells). + +New features: +* The `speed.py` script now provides performance for signature verification + without the use of precomputation. +* New curves supported: secp112r1, secp112r2, secp128r1, secp160r1. + +Performance: +* Use 2-ary Non-Adjacent Form for the combined multiply-add. This speeds up + single-shot verify (i.e. without precomputation) by about 4 to 5%. +* Use native Python 3.8 support for calculating multiplicative inverses. + +Maintenance: +* Include Python 3.9 in PyPI keywords. 
+* More realistic branch coverage counting (ignore Python version-specific + branches). +* Additional test coverage to many parts of the library. +* Migrate to Github Actions for Continuous Testing. + +* Release 0.16.1 (12 Nov 2020) + +New API: +`VerifyingKey.precompute()` supports `lazy` argument to delay precomputation +to the first time the key is used to verify a signature. + +Doc fixes: +Documentation for the `VerifyingKey.precompute()` method. + +Bug fix: +Make created signatures correct when the hash used is bigger than the curve +order bit size and the curve order is not a multiple of 8 (this affects +only users of custom curves or hashes with output larger than 512 bits). + +Performance: +Speed up library load time by calculating the generator point multiplication +tables the first time the points are used, not when they are initialised. + +Maintenance: +Include Python 3.9 in CI testing. +Test coverage for the `VerifyingKey.precompute()` method. +Small speed-ups for the test suite. + * Release 0.16.0 (27 Aug 2020) New features: @@ -5,11 +191,11 @@ Support for reading and writing private keys in PKCS#8 format. New API: `to_pem` and `to_der` now accept new parameter, `format`, to specify -the format of the encoded files, either the dafault, legacy "ssleay", or +the format of the encoded files, either the default, legacy `ssleay`, or the new `pkcs8` to use PKCS#8. Note that only unencrypted PKCS#8 files are supported. Add `allow_truncate` to `verify` in `VerifyingKey`, it defaults to True, -when specified as False, use of large hashes smaller than curves will be +when specified as False, the use of large hashes smaller than curves will be disallowed (as it was in 0.14.1 and earlier). Bug fix: @@ -24,7 +210,7 @@ as such. Maintenance: Ensure that version checks will work with Python 4. Format the source with black. -Fix uses of `assert_` in test suite. +Fix uses of `assert_` in the test suite. Use newer Ubuntu in Travis to test against OpenSSL 1.1.1 (and thus test the interoperability of ECDH code in Travis). @@ -47,24 +233,24 @@ New API: `ecdsa.ecdh` module and `ECDH` class. `PointJacobi` added. `VerifyingKey.verify_digest`, `SigningKey.sign_digest` and -`SigningKey.sign_digest_deterministic` methods now accept `allow_truncate` -argument to enable use of hashes larger than the curve order. +`SigningKey.sign_digest_deterministic` methods now accept the `allow_truncate` +argument to enable the use of hashes larger than the curve order. `VerifyingKey` `from_pem` and `from_der` now accept `hashfunc` parameter like other `from*` methods. -`VerifyingKey` has `precompute` method now. +`VerifyingKey` has the `precompute` method now. `VerifyingKey.from_public_point` may now not perform validation of public -point when `validate_point=False` argument is passed to method. +point when `validate_point=False` argument is passed to the method. `CurveFp` constructor now accepts the `h` parameter - the cofactor of the -elliptic curve, it's used for selection of algorithm of public point +elliptic curve, it's used for the selection of algorithm of public point verification. Performance: -`randrange` now will now perform much fewer calls to system random number +`randrange` now will perform much fewer calls to the system random number generator. `PointJacobi` introduced and used as the underlying implementation; speeds up the library by a factor of about 20. -Library has now optional dependencies on `gmpy` and `gmpy2`. 
When they are -availbale, the elliptic curve calculations will be about 3 times faster. +The library has now optional dependencies on `gmpy` and `gmpy2`. When they are +available, the elliptic curve calculations will be about 3 times faster. Maintenance: expected minimum version of `six` module (1.9.0) is now specified explicitly @@ -89,13 +275,13 @@ Make `SigningKey.sign_digest_deterministic` use default object hashfunc when none was provided. `encode_integer` now works for large integers. Make `encode_oid` and `remove_object` correctly handle OBJECT IDENTIFIERs -with large second subidentifier and padding in encoded subidentifiers. +with large second sub-identifier and padding in encoded sub-identifiers. New features: -Deterministic signature methods now accept `extra_entropy` parameter to further -randomise the selection of `k` (the nonce) for signature, as specified in -RFC6979. -Recovery of public key from signature is now supported. +Deterministic signature methods now accept the `extra_entropy` parameter to +further randomise the selection of `k` (the nonce) for signature, as specified +in RFC6979. +Recovery of the public key from signature is now supported. Support for SEC1/X9.62 formatted keys, all three encodings are supported: "uncompressed", "compressed" and "hybrid". Both string, and PEM/DER will automatically accept them, if the size of the key matches the curve. @@ -120,7 +306,7 @@ added. `VerifyingKey`: `__repr__` is now supported Deprecations: -Python 2.5 is not supported any more - dead code removal. +Python 2.5 is not supported anymore - dead code removal. `from ecdsa.keys import *` will now import only objects defined in that module. Trying to decode a malformed point using `VerifyingKey.from_string` will rise now the `MalformedPointError` exception (that inherits from @@ -139,10 +325,10 @@ modular_exp: will emit `DeprecationWarning` Hardening: Deterministic signatures now verify that the signature won't leak private -key through very unlikely selection of `k` value (the nonce). +key through a very unlikely selection of `k` value (the nonce). Nonce bit size hiding was added (hardening against Minerva attack). Please -note that it DOES NOT make library secure against side channel attacks (timing -attacks). +note that it DOES NOT make the library secure against side-channel attacks +(timing attacks). Performance: The public key in key generation is not verified twice now, making key @@ -227,14 +413,15 @@ hashfunc=sha256 in each time they call sign() or verify(). Fix test failure against OpenSSL-1.0.0 (previous versions only worked against openssl-0.9.8 or earlier). Increase python requirement to py2.5 or later -(still no py3 compatibility, but work is underway). Replace use of obsolete +(still no py3 compatibility, but work is underway). Replace the use of obsolete 'sha' library with modern 'hashlib'. Clean up unit test runner (stop using subprocesses). * Release 0.6 (15 Oct 2010) -Small packaging changes: extract version number from git, add 'setup.py test' -command, set exit code correctly on test failure. Fix pyflakes warnings. +Small packaging changes: extract the version number from git, add +'setup.py test' command, set exit code correctly on test failure. Fix pyflakes +warnings. 
* Release 0.5 (27 Apr 2010) diff --git a/README.md b/README.md index 307785db..0bf02bde 100644 --- a/README.md +++ b/README.md @@ -1,22 +1,31 @@ -# Pure-Python ECDSA - -[![build status](https://travis-ci.org/warner/python-ecdsa.png)](http://travis-ci.org/warner/python-ecdsa) -[![Coverage Status](https://coveralls.io/repos/warner/python-ecdsa/badge.svg)](https://coveralls.io/r/warner/python-ecdsa) -[![condition coverage](https://img.shields.io/badge/condition%20coverage-81%25-yellow)](https://travis-ci.org/warner/python-ecdsa/jobs/626479178#L776) +# Pure-Python ECDSA and ECDH + +[![GitHub CI](https://github.com/tlsfuzzer/python-ecdsa/actions/workflows/ci.yml/badge.svg)](https://github.com/tlsfuzzer/python-ecdsa/actions/workflows/ci.yml) +[![Documentation Status](https://readthedocs.org/projects/ecdsa/badge/?version=latest)](https://ecdsa.readthedocs.io/en/latest/?badge=latest) +[![Coverage Status](https://coveralls.io/repos/github/tlsfuzzer/python-ecdsa/badge.svg?branch=master)](https://coveralls.io/github/tlsfuzzer/python-ecdsa?branch=master) +![condition coverage](https://img.shields.io/endpoint?url=https://gist.githubusercontent.com/tomato42/9b6ca1f3410207fbeca785a178781651/raw/python-ecdsa-condition-coverage.json) +![mutation score](https://img.shields.io/endpoint?url=https://gist.githubusercontent.com/tomato42/9b6ca1f3410207fbeca785a178781651/raw/python-ecdsa-mutation-score.json) +[![CodeQL](https://github.com/tlsfuzzer/python-ecdsa/actions/workflows/codeql.yml/badge.svg)](https://github.com/tlsfuzzer/python-ecdsa/actions/workflows/codeql.yml) [![Latest Version](https://img.shields.io/pypi/v/ecdsa.svg?style=flat)](https://pypi.python.org/pypi/ecdsa/) ![Code style: black](https://img.shields.io/badge/code%20style-black-000000.svg?style=flat) -This is an easy-to-use implementation of ECDSA cryptography (Elliptic Curve -Digital Signature Algorithm), implemented purely in Python, released under -the MIT license. With this library, you can quickly create keypairs (signing -key and verifying key), sign messages, and verify the signatures. The keys -and signatures are very short, making them easy to handle and incorporate -into other protocols. +This is an easy-to-use implementation of ECC (Elliptic Curve Cryptography) +with support for ECDSA (Elliptic Curve Digital Signature Algorithm), +EdDSA (Edwards-curve Digital Signature Algorithm) and ECDH +(Elliptic Curve Diffie-Hellman), implemented purely in Python, released under +the MIT license. With this library, you can quickly create key pairs (signing +key and verifying key), sign messages, and verify the signatures. You can +also agree on a shared secret key based on exchanged public keys. +The keys and signatures are very short, making them easy to handle and +incorporate into other protocols. + +**NOTE: This library should not be used in production settings, see [Security](#Security) for more details.** ## Features -This library provides key generation, signing, and verifying, for five +This library provides key generation, signing, verifying, and shared secret +derivation for five popular NIST "Suite B" GF(p) (_prime field_) curves, with key lengths of 192, 224, 256, 384, and 521 bits. The "short names" for these curves, as known by the OpenSSL tool (`openssl ecparam -list_curves`), are: `prime192v1`, @@ -25,24 +34,29 @@ the OpenSSL tool (`openssl ecparam -list_curves`), are: `prime192v1`, regular (non-twisted) variants of Brainpool curves from 160 to 512 bits. 
The "short names" of those curves are: `brainpoolP160r1`, `brainpoolP192r1`, `brainpoolP224r1`, `brainpoolP256r1`, `brainpoolP320r1`, `brainpoolP384r1`, -`brainpoolP512r1`. +`brainpoolP512r1`. A few of the small curves from the SEC standard are also +included (mainly to speed up testing of the library); those are: +`secp112r1`, `secp112r2`, `secp128r1`, and `secp160r1`. +Key generation, signing and verifying are also supported for Ed25519 and +Ed448 curves. No other curves are included, but it is not too hard to add support for more curves over prime fields. ## Dependencies This library uses only Python and the 'six' package. It is compatible with -Python 2.6, 2.7 and 3.3+. It also supports execution on the alternative +Python 2.6, 2.7, and 3.6+. It also supports execution on alternative implementations like pypy and pypy3. If `gmpy2` or `gmpy` is installed, they will be used for faster arithmetic. Either of them can be installed after this library is installed, `python-ecdsa` will detect their presence on start-up and use them automatically. +You should prefer `gmpy2` on Python 3 for optimal performance. To run the OpenSSL compatibility tests, the 'openssl' tool must be in your `PATH`. This release has been tested successfully against OpenSSL 0.9.8o, -1.0.0a, 1.0.2f and 1.1.1d (among others). +1.0.0a, 1.0.2f, 1.1.1d and 3.0.1 (among others). ## Installation @@ -67,88 +81,113 @@ pip install ecdsa[gmpy] ## Speed -The following table shows how long this library takes to generate keypairs -(`keygen`), to sign data (`sign`), and to verify those signatures (`verify`). +The following table shows how long this library takes to generate key pairs +(`keygen`), to sign data (`sign`), to verify those signatures (`verify`), +to derive a shared secret (`ecdh`), and +to verify the signatures with no key-specific precomputation (`no PC verify`). All those values are in seconds. For convenience, the inverses of those values are also provided: how many keys per second can be generated (`keygen/s`), how many signatures -can be made per second (`sign/s`) and how many signatures can be verified -per second (`verify/s`). The size of raw signature (generally the smallest -way a signature can be encoded) is also provided in the `siglen` column. +can be made per second (`sign/s`), how many signatures can be verified +per second (`verify/s`), how many shared secrets can be derived per second +(`ecdh/s`), and how many signatures with no key-specific +precomputation can be verified per second (`no PC verify/s`). The size of raw +signature (generally the smallest +way a signature can be encoded) is also provided in the `siglen` column. Use `tox -e speed` to generate this table on your own computer. 
On an Intel Core i7 4790K @ 4.0GHz I'm getting the following performance: ``` - siglen keygen keygen/s sign sign/s verify verify/s - NIST192p: 48 0.00035s 2893.02 0.00038s 2620.53 0.00069s 1458.92 - NIST224p: 56 0.00043s 2307.11 0.00048s 2092.00 0.00088s 1131.33 - NIST256p: 64 0.00056s 1793.70 0.00061s 1639.87 0.00113s 883.79 - NIST384p: 96 0.00116s 864.33 0.00124s 806.29 0.00233s 429.87 - NIST521p: 132 0.00221s 452.16 0.00234s 427.31 0.00460s 217.19 - SECP256k1: 64 0.00056s 1772.65 0.00061s 1628.73 0.00110s 912.13 - BRAINPOOLP160r1: 40 0.00026s 3801.86 0.00029s 3401.11 0.00052s 1930.47 - BRAINPOOLP192r1: 48 0.00034s 2925.73 0.00038s 2634.34 0.00070s 1438.06 - BRAINPOOLP224r1: 56 0.00044s 2287.98 0.00048s 2083.87 0.00088s 1137.52 - BRAINPOOLP256r1: 64 0.00056s 1774.11 0.00061s 1628.25 0.00112s 890.71 - BRAINPOOLP320r1: 80 0.00081s 1238.18 0.00087s 1146.71 0.00151s 661.95 - BRAINPOOLP384r1: 96 0.00117s 855.47 0.00124s 804.56 0.00241s 414.83 - BRAINPOOLP512r1: 128 0.00223s 447.99 0.00234s 427.49 0.00437s 229.09 + siglen keygen keygen/s sign sign/s verify verify/s no PC verify no PC verify/s + NIST192p: 48 0.00032s 3134.06 0.00033s 2985.53 0.00063s 1598.36 0.00129s 774.43 + NIST224p: 56 0.00040s 2469.24 0.00042s 2367.88 0.00081s 1233.41 0.00170s 586.66 + NIST256p: 64 0.00051s 1952.73 0.00054s 1867.80 0.00098s 1021.86 0.00212s 471.27 + NIST384p: 96 0.00107s 935.92 0.00111s 904.23 0.00203s 491.77 0.00446s 224.00 + NIST521p: 132 0.00210s 475.52 0.00215s 464.16 0.00398s 251.28 0.00874s 114.39 + SECP256k1: 64 0.00052s 1921.54 0.00054s 1847.49 0.00105s 948.68 0.00210s 477.01 + BRAINPOOLP160r1: 40 0.00025s 4003.88 0.00026s 3845.12 0.00053s 1893.93 0.00105s 949.92 + BRAINPOOLP192r1: 48 0.00033s 3043.97 0.00034s 2975.98 0.00063s 1581.50 0.00135s 742.29 + BRAINPOOLP224r1: 56 0.00041s 2436.44 0.00043s 2315.51 0.00078s 1278.49 0.00180s 556.16 + BRAINPOOLP256r1: 64 0.00053s 1892.49 0.00054s 1846.24 0.00114s 875.64 0.00229s 437.25 + BRAINPOOLP320r1: 80 0.00073s 1361.26 0.00076s 1309.25 0.00143s 699.29 0.00322s 310.49 + BRAINPOOLP384r1: 96 0.00107s 931.29 0.00111s 901.80 0.00230s 434.19 0.00476s 210.20 + BRAINPOOLP512r1: 128 0.00207s 483.41 0.00212s 471.42 0.00425s 235.43 0.00912s 109.61 + SECP112r1: 28 0.00015s 6672.53 0.00016s 6440.34 0.00031s 3265.41 0.00056s 1774.20 + SECP112r2: 28 0.00015s 6697.11 0.00015s 6479.98 0.00028s 3524.72 0.00058s 1716.16 + SECP128r1: 32 0.00018s 5497.65 0.00019s 5272.89 0.00036s 2747.39 0.00072s 1396.16 + SECP160r1: 42 0.00025s 3949.32 0.00026s 3894.45 0.00046s 2153.85 0.00102s 985.07 + Ed25519: 64 0.00076s 1324.48 0.00042s 2405.01 0.00109s 918.05 0.00344s 290.50 + Ed448: 114 0.00176s 569.53 0.00115s 870.94 0.00282s 355.04 0.01024s 97.69 ecdh ecdh/s - NIST192p: 0.00110s 910.70 - NIST224p: 0.00143s 701.17 - NIST256p: 0.00178s 560.44 - NIST384p: 0.00383s 261.03 - NIST521p: 0.00745s 134.23 - SECP256k1: 0.00168s 596.23 - BRAINPOOLP160r1: 0.00085s 1174.02 - BRAINPOOLP192r1: 0.00113s 883.47 - BRAINPOOLP224r1: 0.00145s 687.82 - BRAINPOOLP256r1: 0.00195s 514.03 - BRAINPOOLP320r1: 0.00277s 360.80 - BRAINPOOLP384r1: 0.00412s 242.58 - BRAINPOOLP512r1: 0.00787s 127.12 + NIST192p: 0.00104s 964.89 + NIST224p: 0.00134s 748.63 + NIST256p: 0.00170s 587.08 + NIST384p: 0.00352s 283.90 + NIST521p: 0.00717s 139.51 + SECP256k1: 0.00154s 648.40 + BRAINPOOLP160r1: 0.00082s 1220.70 + BRAINPOOLP192r1: 0.00105s 956.75 + BRAINPOOLP224r1: 0.00136s 734.52 + BRAINPOOLP256r1: 0.00178s 563.32 + BRAINPOOLP320r1: 0.00252s 397.23 + BRAINPOOLP384r1: 0.00376s 266.27 + BRAINPOOLP512r1: 0.00733s 136.35 + SECP112r1: 
0.00046s 2180.40 + SECP112r2: 0.00045s 2229.14 + SECP128r1: 0.00054s 1868.15 + SECP160r1: 0.00080s 1243.98 ``` To test performance with `gmpy2` loaded, use `tox -e speedgmpy2`. On the same machine I'm getting the following performance with `gmpy2`: ``` - siglen keygen keygen/s sign sign/s verify verify/s - NIST192p: 48 0.00017s 5945.50 0.00018s 5544.66 0.00033s 3002.54 - NIST224p: 56 0.00021s 4742.14 0.00022s 4463.52 0.00044s 2248.59 - NIST256p: 64 0.00024s 4155.73 0.00025s 3994.28 0.00047s 2105.34 - NIST384p: 96 0.00041s 2415.06 0.00043s 2316.41 0.00085s 1177.18 - NIST521p: 132 0.00072s 1391.14 0.00074s 1359.63 0.00140s 716.31 - SECP256k1: 64 0.00024s 4216.50 0.00025s 3994.52 0.00047s 2120.57 - BRAINPOOLP160r1: 40 0.00014s 7038.99 0.00015s 6501.55 0.00029s 3397.79 - BRAINPOOLP192r1: 48 0.00017s 5983.18 0.00018s 5626.08 0.00035s 2843.62 - BRAINPOOLP224r1: 56 0.00021s 4727.54 0.00022s 4464.86 0.00043s 2326.84 - BRAINPOOLP256r1: 64 0.00024s 4221.00 0.00025s 4010.26 0.00049s 2046.40 - BRAINPOOLP320r1: 80 0.00032s 3142.14 0.00033s 3009.15 0.00061s 1652.88 - BRAINPOOLP384r1: 96 0.00041s 2415.98 0.00043s 2340.35 0.00083s 1198.77 - BRAINPOOLP512r1: 128 0.00064s 1567.27 0.00066s 1526.33 0.00127s 788.51 + siglen keygen keygen/s sign sign/s verify verify/s no PC verify no PC verify/s + NIST192p: 48 0.00017s 5933.40 0.00017s 5751.70 0.00032s 3125.28 0.00067s 1502.41 + NIST224p: 56 0.00021s 4782.87 0.00022s 4610.05 0.00040s 2487.04 0.00089s 1126.90 + NIST256p: 64 0.00023s 4263.98 0.00024s 4125.16 0.00045s 2200.88 0.00098s 1016.82 + NIST384p: 96 0.00041s 2449.54 0.00042s 2399.96 0.00083s 1210.57 0.00172s 581.43 + NIST521p: 132 0.00071s 1416.07 0.00072s 1389.81 0.00144s 692.93 0.00312s 320.40 + SECP256k1: 64 0.00024s 4245.05 0.00024s 4122.09 0.00045s 2206.40 0.00094s 1068.32 + BRAINPOOLP160r1: 40 0.00014s 6939.17 0.00015s 6681.55 0.00029s 3452.43 0.00057s 1769.81 + BRAINPOOLP192r1: 48 0.00017s 5920.05 0.00017s 5774.36 0.00034s 2979.00 0.00069s 1453.19 + BRAINPOOLP224r1: 56 0.00021s 4732.12 0.00022s 4622.65 0.00041s 2422.47 0.00087s 1149.87 + BRAINPOOLP256r1: 64 0.00024s 4233.02 0.00024s 4115.20 0.00047s 2143.27 0.00098s 1015.60 + BRAINPOOLP320r1: 80 0.00032s 3162.38 0.00032s 3077.62 0.00063s 1598.83 0.00136s 737.34 + BRAINPOOLP384r1: 96 0.00041s 2436.88 0.00042s 2395.62 0.00083s 1202.68 0.00178s 562.85 + BRAINPOOLP512r1: 128 0.00063s 1587.60 0.00064s 1558.83 0.00125s 799.96 0.00281s 355.83 + SECP112r1: 28 0.00009s 11118.66 0.00009s 10775.48 0.00018s 5456.00 0.00033s 3020.83 + SECP112r2: 28 0.00009s 11322.97 0.00009s 10857.71 0.00017s 5748.77 0.00032s 3094.28 + SECP128r1: 32 0.00010s 10078.39 0.00010s 9665.27 0.00019s 5200.58 0.00036s 2760.88 + SECP160r1: 42 0.00015s 6875.51 0.00015s 6647.35 0.00029s 3422.41 0.00057s 1768.35 + Ed25519: 64 0.00030s 3322.56 0.00018s 5568.63 0.00046s 2165.35 0.00153s 654.02 + Ed448: 114 0.00060s 1680.53 0.00039s 2567.40 0.00096s 1036.67 0.00350s 285.62 ecdh ecdh/s - NIST192p: 0.00051s 1960.26 - NIST224p: 0.00067s 1502.97 - NIST256p: 0.00073s 1376.12 - NIST384p: 0.00132s 758.68 - NIST521p: 0.00231s 433.23 - SECP256k1: 0.00072s 1387.18 - BRAINPOOLP160r1: 0.00042s 2366.60 - BRAINPOOLP192r1: 0.00049s 2026.80 - BRAINPOOLP224r1: 0.00067s 1486.52 - BRAINPOOLP256r1: 0.00076s 1310.31 - BRAINPOOLP320r1: 0.00101s 986.16 - BRAINPOOLP384r1: 0.00131s 761.35 - BRAINPOOLP512r1: 0.00211s 473.30 + NIST192p: 0.00050s 1985.70 + NIST224p: 0.00066s 1524.16 + NIST256p: 0.00071s 1413.07 + NIST384p: 0.00127s 788.89 + NIST521p: 0.00230s 434.85 + SECP256k1: 0.00071s 1409.95 + BRAINPOOLP160r1: 
0.00042s 2374.65 + BRAINPOOLP192r1: 0.00051s 1960.01 + BRAINPOOLP224r1: 0.00066s 1518.37 + BRAINPOOLP256r1: 0.00071s 1399.90 + BRAINPOOLP320r1: 0.00100s 997.21 + BRAINPOOLP384r1: 0.00129s 777.51 + BRAINPOOLP512r1: 0.00210s 475.99 + SECP112r1: 0.00022s 4457.70 + SECP112r2: 0.00024s 4252.33 + SECP128r1: 0.00028s 3589.31 + SECP160r1: 0.00043s 2305.02 ``` (there's also `gmpy` version, execute it using `tox -e speedgmpy`) For comparison, a highly optimised implementation (including curve-specific -assembly for some curves), like the one in OpenSSL 1.1.1d, provides following -performance numbers on the same machine. +assembly for some curves), like the one in OpenSSL 1.1.1d, provides the +following performance numbers on the same machine. Run `openssl speed ecdsa` and `openssl speed ecdh` to reproduce it: ``` sign verify sign/s verify/s @@ -161,6 +200,10 @@ Run `openssl speed ecdsa` and `openssl speed ecdh` to reproduce it: 384 bits ecdsa (brainpoolP384r1) 0.0008s 0.0007s 1258.8 1528.1 512 bits ecdsa (brainpoolP512r1) 0.0015s 0.0012s 675.1 860.1 + sign verify sign/s verify/s + 253 bits EdDSA (Ed25519) 0.0000s 0.0001s 28217.9 10897.7 + 456 bits EdDSA (Ed448) 0.0003s 0.0005s 3926.5 2147.7 + op op/s 192 bits ecdh (nistp192) 0.0002s 4853.4 224 bits ecdh (nistp224) 0.0001s 15252.1 @@ -186,9 +229,9 @@ following lengths (in bytes): In 2006, Peter Pearson announced his pure-python implementation of ECDSA in a [message to sci.crypt][1], available from his [download site][2]. In 2010, Brian Warner wrote a wrapper around this code, to make it a bit easier and -safer to use. Hubert Kario then included an implementation of elliptic curve -cryptography that uses Jacobian coordinates internally, improving performance -about 20-fold. You are looking at the README for this wrapper. +safer to use. In 2020, Hubert Kario included an implementation of elliptic +curve cryptography that uses Jacobian coordinates internally, improving +performance about 20-fold. You are looking at the README for this wrapper. [1]: http://www.derkeiler.com/Newsgroups/sci.crypt/2006-01/msg00651.html [2]: http://webpages.charter.net/curryfans/peter/downloads.html @@ -199,14 +242,16 @@ To run the full test suite, do this: tox -e coverage -On an Intel Core i7 4790K @ 4.0GHz, the tests take about 16 seconds to execute. +On an Intel Core i7 4790K @ 4.0GHz, the tests take about 18 seconds to execute. The test suite uses [`hypothesis`](https://github.com/HypothesisWorks/hypothesis) so there is some inherent variability in the test suite execution time. -One part of `test_pyecdsa.py` checks compatibility with OpenSSL, by -running the "openssl" CLI tool, make sure it's in your `PATH` if you want -to test compatibility with it. +One part of `test_pyecdsa.py` and `test_ecdh.py` checks compatibility with +OpenSSL, by running the "openssl" CLI tool, make sure it's in your `PATH` if +you want to test compatibility with it (if OpenSSL is missing, too old, or +doesn't support all the curves supported in upstream releases you will see +skipped tests in the above `coverage` run). ## Security @@ -216,22 +261,22 @@ OpenSSL. [pyca/cryptography](https://cryptography.io) is one example of such a wrapper. The primary use-case of this library is as a portable library for interoperability testing and as a teaching tool. 
-**This library does not protect against side channel attacks.** +**This library does not protect against side-channel attacks.** -Do not allow attackers to measure how long it takes you to generate a keypair +Do not allow attackers to measure how long it takes you to generate a key pair or sign a message. Do not allow attackers to run code on the same physical -machine when keypair generation or signing is taking place (this includes +machine when key pair generation or signing is taking place (this includes virtual machines). Do not allow attackers to measure how much power your -computer uses while generating the keypair or signing a message. Do not allow +computer uses while generating the key pair or signing a message. Do not allow attackers to measure RF interference coming from your computer while generating -a keypair or signing a message. Note: just loading the private key will cause -keypair generation. Other operations or attack vectors may also be +a key pair or signing a message. Note: just loading the private key will cause +key pair generation. Other operations or attack vectors may also be vulnerable to attacks. **For a sophisticated attacker observing just one operation with a private key will be sufficient to completely reconstruct the private key**. Please also note that any Pure-python cryptographic library will be vulnerable -to the same side channel attacks. This is because Python does not provide +to the same side-channel attacks. This is because Python does not provide side-channel secure primitives (with the exception of [`hmac.compare_digest()`][3]), making side-channel secure programming impossible. @@ -332,7 +377,7 @@ vk2 = VerifyingKey.from_pem(vk_pem) There are a couple of different ways to compute a signature. Fundamentally, ECDSA takes a number that represents the data being signed, and returns a pair of numbers that represent the signature. The `hashfunc=` argument to -`sk.sign()` and `vk.verify()` is used to turn an arbitrary string into +`sk.sign()` and `vk.verify()` is used to turn an arbitrary string into a fixed-length digest, which is then turned into a number that ECDSA can sign, and both sign and verify must use the same approach. The default value is `hashlib.sha1`, but if you use NIST256p or a longer curve, you can use @@ -490,7 +535,7 @@ failures of the entropy source. ## Examples -Create a NIST192p keypair and immediately save both to disk: +Create a NIST192p key pair and immediately save both to disk: ```python from ecdsa import SigningKey @@ -533,7 +578,7 @@ except BadSignatureError: print "BAD SIGNATURE" ``` -Create a NIST521p keypair: +Create a NIST521p key pair: ```python from ecdsa import SigningKey, NIST521p @@ -580,7 +625,7 @@ vk = VerifyingKey.from_string(bytearray.fromhex(comp_str), curve=NIST256p) print(vk.to_string("uncompressed").hex()) ``` -ECDH key exchange with remote party +ECDH key exchange with remote party: ```python from ecdsa import ECDH, NIST256p diff --git a/SECURITY.md b/SECURITY.md new file mode 100644 index 00000000..f5dbcb48 --- /dev/null +++ b/SECURITY.md @@ -0,0 +1,48 @@ +# Security Policy + +## Supported Versions + +Only the latest released version is supported. +Alpha and beta releases are always unsupported with security fixes. + +The project uses semantic versioning, as such, minor version changes are API compatible. + +| Version | Supported | +| -------- | ------------------ | +| 0.18.x | :white_check_mark: | +| < 0.18 | :x: | + +## Support Scope + +This library was not designed with security in mind. 
If you are processing data that needs +to be protected, we suggest you use a quality wrapper around OpenSSL. +[`pyca/cryptography`](https://cryptography.io/) is one example of such a wrapper. +The primary use-case of this library is as a portable library for interoperability testing +and as a teaching tool. + +**This library does not protect against side-channel attacks.** + +Do not allow attackers to measure how long it takes you to generate a key pair or sign a message. +Do not allow attackers to run code on the same physical machine when key pair generation or +signing is taking place (this includes virtual machines). +Do not allow attackers to measure how much power your computer uses while generating the key pair +or signing a message. Do not allow attackers to measure RF interference coming from your computer +while generating a key pair or signing a message. Note: just loading the private key will cause +key pair generation. Other operations or attack vectors may also be vulnerable to attacks. +For a sophisticated attacker, observing just one operation with a private key will be sufficient +to completely reconstruct the private key. + +Fixes for side-channel vulnerabilities will not be developed. + +Please also note that any Pure-python cryptographic library will be vulnerable to the same +side-channel attacks. This is because Python does not provide side-channel secure primitives +(with the exception of [`hmac.compare_digest()`](https://docs.python.org/3/library/hmac.html#hmac.compare_digest)), +making side-channel secure programming impossible. + +This library depends upon a strong source of random numbers. Do not use it on a system +where `os.urandom()` does not provide cryptographically secure random numbers. + +## Reporting a Vulnerability + +If you find a security vulnerability in this library, you can report it using the "Report a vulnerability" button on the Security tab in the GitHub UI. +Alternatively, you can contact the project maintainer at hkario at redhat dot com. 
diff --git a/build-requirements-2.6.txt b/build-requirements-2.6.txt index 2d0d14e9..ee3d22fd 100644 --- a/build-requirements-2.6.txt +++ b/build-requirements-2.6.txt @@ -1,6 +1,10 @@ tox -coveralls<1.3.0 +inflect<0.3.1 +pyopenssl<18 +cffi<1.14 +git+https://github.com/tomato42/coveralls-python.git@add-py26#egg=coveralls idna<2.8 unittest2 hypothesis<3 -coverage==4.5.4 +coverage +mock==2.0.0 diff --git a/build-requirements-2.7.txt b/build-requirements-2.7.txt new file mode 100644 index 00000000..b053e03a --- /dev/null +++ b/build-requirements-2.7.txt @@ -0,0 +1,5 @@ +tox +git+https://github.com/tomato42/coveralls-python.git@add-py26#egg=coveralls +hypothesis +pytest>=4.6.0 +coverage diff --git a/build-requirements-3.3.txt b/build-requirements-3.3.txt deleted file mode 100644 index 9d486f78..00000000 --- a/build-requirements-3.3.txt +++ /dev/null @@ -1,9 +0,0 @@ -python-coveralls -pluggy<0.6 -tox<3 -wheel<0.30 -virtualenv==15.2.0 -enum34 -hypothesis<3.44 -coverage==4.5.4 -urllib3<=1.25.8 diff --git a/build-requirements-3.4.txt b/build-requirements-3.4.txt deleted file mode 100644 index 4d95aae5..00000000 --- a/build-requirements-3.4.txt +++ /dev/null @@ -1,6 +0,0 @@ -tox -python-coveralls -hypothesis -pytest>=4.6.0 -PyYAML<5.3 -coverage==4.5.4 diff --git a/build-requirements.txt b/build-requirements.txt index a7b75c1a..a35f6aba 100644 --- a/build-requirements.txt +++ b/build-requirements.txt @@ -1,5 +1,5 @@ tox -python-coveralls +coveralls hypothesis pytest>=4.6.0 -coverage==4.5.4 +coverage diff --git a/conftest.py b/conftest.py new file mode 100644 index 00000000..6c163acf --- /dev/null +++ b/conftest.py @@ -0,0 +1,11 @@ +def pytest_addoption(parser): + parser.addoption( + "--fast", action="store_true", default=False, help="run tests fast" + ) + + +def pytest_configure(config): + config.addinivalue_line( + "markers", + "slow: mark test as slow to run (deselect with '-m \"not slow\"')", + ) diff --git a/cosmic-ray-12way.sh b/cosmic-ray-12way.sh new file mode 100644 index 00000000..7b16fb2d --- /dev/null +++ b/cosmic-ray-12way.sh @@ -0,0 +1,13 @@ +#!/bin/bash +set -e + +cosmic-ray init cosmic-ray-12way.toml session.sqlite +cosmic-ray baseline --session-file session.baseline.sqlite cosmic-ray-12way.toml +cr-report --show-output session.baseline.sqlite +# some mutations cause huge memory use, so put it in a cgroup +# systemd-run --user --scope -p MemoryMax=8G -p MemoryHigh=8G cr-http-workers cosmic-ray-12way.toml . +cr-http-workers cosmic-ray-12way.toml . 
+cosmic-ray exec cosmic-ray-12way.toml session.sqlite +cr-report session.sqlite +cr-html session.sqlite > session.html +cr-rate --estimate --fail-over 29 --confidence 99.9 session.sqlite diff --git a/cosmic-ray-12way.toml b/cosmic-ray-12way.toml new file mode 100644 index 00000000..c1e8e55c --- /dev/null +++ b/cosmic-ray-12way.toml @@ -0,0 +1,28 @@ +[cosmic-ray] +module-path = "src" +timeout = 20.0 +excluded-modules = ['src/ecdsa/_sha3.py', 'src/ecdsa/_version.py', 'src/ecdsa/test*'] +test-command = "pytest --timeout=30 -x --fast -m 'not slow' src/" + +[cosmic-ray.distributor] +name = "http" + +[cosmic-ray.distributor.http] +worker-urls = [ + "http://localhost:9870", + "http://localhost:9871", + "http://localhost:9872", + "http://localhost:9873", + "http://localhost:9874", + "http://localhost:9875", + "http://localhost:9876", + "http://localhost:9877", + "http://localhost:9878", + "http://localhost:9879", + "http://localhost:9880", + "http://localhost:9881", + "http://localhost:9882" +] + +[cosmic-ray.filters.git-filter] +branch = "master" diff --git a/cosmic-ray.sh b/cosmic-ray.sh new file mode 100644 index 00000000..51181a3a --- /dev/null +++ b/cosmic-ray.sh @@ -0,0 +1,12 @@ +#!/bin/bash +set -e + +cosmic-ray init cosmic-ray.toml session.sqlite +cosmic-ray baseline --session-file session.baseline.sqlite cosmic-ray.toml +cr-report --show-output session.baseline.sqlite +# some mutations cause huge memory use, so put it in a cgroup +# systemd-run --user --scope -p MemoryMax=2G -p MemoryHigh=2G cosmic-ray exec cosmic-ray.toml session.sqlite +cosmic-ray exec cosmic-ray.toml session.sqlite +cr-report session.sqlite +cr-html session.sqlite > session.html +cr-rate --estimate --fail-over 29 --confidence 99.9 session.sqlite diff --git a/cosmic-ray.toml b/cosmic-ray.toml new file mode 100644 index 00000000..af40c3cb --- /dev/null +++ b/cosmic-ray.toml @@ -0,0 +1,11 @@ +[cosmic-ray] +module-path = "src" +timeout = 20.0 +excluded-modules = ['src/ecdsa/_sha3.py', 'src/ecdsa/_version.py', 'src/ecdsa/test*'] +test-command = "pytest --timeout 30 -x --fast -m 'not slow' src/" + +[cosmic-ray.distributor] +name = "local" + +[cosmic-ray.filters.git-filter] +branch = "master" diff --git a/diff-instrumental.py b/diff-instrumental.py index d8fe376a..0cac0426 100644 --- a/diff-instrumental.py +++ b/diff-instrumental.py @@ -6,11 +6,12 @@ max_difference = 0 read_location = None save_location = None +raw = False argv = sys.argv[1:] opts, args = getopt.getopt( - argv, "s:r:", ["fail-under=", "max-difference=", "save=", "read="] + argv, "s:r:", ["fail-under=", "max-difference=", "save=", "read=", "raw"] ) if args: raise ValueError("Unexpected parameters: {0}".format(args)) @@ -23,6 +24,8 @@ fail_under = float(arg) / 100.0 elif opt == "--max-difference": max_difference = float(arg) / 100.0 + elif opt == "--raw": + raw = True else: raise ValueError("Unknown option: {0}".format(opt)) @@ -49,7 +52,10 @@ with open(save_location, "w") as f: f.write("{0:1.40f}".format(coverage)) -print("Coverage: {0:6.2f}%".format(coverage * 100)) +if raw: + print("{0:6.2f}".format(coverage * 100)) +else: + print("Coverage: {0:6.2f}%".format(coverage * 100)) if read_location: print("Difference: {0:6.2f}%".format((old_coverage - coverage) * 100)) diff --git a/docs/Makefile b/docs/Makefile new file mode 100644 index 00000000..d0c3cbf1 --- /dev/null +++ b/docs/Makefile @@ -0,0 +1,20 @@ +# Minimal makefile for Sphinx documentation +# + +# You can set these variables from the command line, and also +# from the environment for the first two. 
+SPHINXOPTS ?= +SPHINXBUILD ?= sphinx-build +SOURCEDIR = source +BUILDDIR = build + +# Put it first so that "make" without argument is like "make help". +help: + @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) + +.PHONY: help Makefile + +# Catch-all target: route all unknown targets to Sphinx using the new +# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). +%: Makefile + @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) diff --git a/docs/make.bat b/docs/make.bat new file mode 100644 index 00000000..6247f7e2 --- /dev/null +++ b/docs/make.bat @@ -0,0 +1,35 @@ +@ECHO OFF + +pushd %~dp0 + +REM Command file for Sphinx documentation + +if "%SPHINXBUILD%" == "" ( + set SPHINXBUILD=sphinx-build +) +set SOURCEDIR=source +set BUILDDIR=build + +if "%1" == "" goto help + +%SPHINXBUILD% >NUL 2>NUL +if errorlevel 9009 ( + echo. + echo.The 'sphinx-build' command was not found. Make sure you have Sphinx + echo.installed, then set the SPHINXBUILD environment variable to point + echo.to the full path of the 'sphinx-build' executable. Alternatively you + echo.may add the Sphinx directory to PATH. + echo. + echo.If you don't have Sphinx installed, grab it from + echo.http://sphinx-doc.org/ + exit /b 1 +) + +%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% +goto end + +:help +%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% + +:end +popd diff --git a/docs/requirements.txt b/docs/requirements.txt new file mode 100644 index 00000000..6c5d5d44 --- /dev/null +++ b/docs/requirements.txt @@ -0,0 +1 @@ +sphinx-rtd-theme diff --git a/docs/source/basics.rst b/docs/source/basics.rst new file mode 100644 index 00000000..b71e925c --- /dev/null +++ b/docs/source/basics.rst @@ -0,0 +1,162 @@ +====================== +Basics of ECC handling +====================== + +The :term:`ECC`, like any asymmetric cryptography system, deals with private +keys and public keys. Private keys are generally used to create signatures, +and are kept, as the name suggests, private. That's because possession of a +private key allows creating a signature that can be verified with a public key. +If the public key is associated with an identity (like a person or an +institution), possession of the private key will allow impersonating +that identity. + +The public keys, on the other hand, are widely distributed, and they don't +have to be kept private. Their primary purpose is to allow +checking whether a given signature was made with the associated private key. + +Number representations +====================== + +At a lower level, the private key is a single number, usually about the +size of the curve: a NIST P-256 private key will have a size of 256 bits, +though as it needs to be selected randomly, it may be a slightly smaller +number (255-bit, 248-bit, etc.). +Public points are a pair of numbers. That pair specifies a point on an +elliptic curve (a pair of integers that satisfy the curve equation). +Those two numbers are similarly close in size to the curve size, so both the +``x`` and ``y`` coordinate of a NIST P-256 curve will also be around 256 bits in +size. + +.. note:: + To be more precise, the size of the private key is related to the + curve *order*, i.e. the number of points on a curve. The coordinates + of the curve depend on the *field* of the curve, which usually means the + size of the *prime* used for operations on points. 
While the *order* and + the *prime* size are related and fairly close in size, it's possible + to have a curve where either of them is larger by a bit (i.e. + it's possible to have a curve that uses a 256 bit *prime* that has a 257 bit + *order*). + +Since normally computers work with much smaller numbers, like 32 bit or 64 bit, +we need to use special approaches to represent numbers that are hundreds of +bits large. + +The first is to decide whether the numbers should be stored in a big +endian format, or in little endian format. In big endian, the most +significant bits are stored first, so a number like :math:`2^{16}` is saved +as three bytes: a byte with value 1 followed by two bytes with value 0. +In little endian format the least significant bits are stored first, so +a number like :math:`2^{16}` would be stored as three bytes: +first two bytes with value 0, then a byte with value 1. + +For :term:`ECDSA` big endian encoding is usually used, while for :term:`EdDSA` +little endian encoding is usually used. + +Secondly, we need to decide if the numbers need to be stored as fixed length +strings (zero padded if necessary), or if they should be stored with +the minimal number of bytes necessary. +That depends on the format and the place it's used; some require strict +sizes (so even if the number encoded is 1, but the curve used is 128 bits large, +that number 1 still needs to be encoded with 16 bytes, with the fifteen most +significant bytes equal to zero). + +Public key encoding +=================== + +Generally, public keys (i.e. points) are expressed as fixed size byte strings. + +While public keys can be saved as two integers, one to represent the +``x`` coordinate and one to represent the ``y`` coordinate, that actually +provides a lot of redundancy. Because of the specifics of elliptic curves, +for every valid ``x`` value there are only two valid ``y`` values. +Moreover, if you have an ``x`` value, you can compute those two possible +``y`` values (if they exist). +As such, it's possible to save just the ``x`` coordinate and the sign +of the ``y`` coordinate (as the two possible values are negatives of +each other: :math:`y_1 == -y_2`). + +That gives us a few options to represent the public point; the most common are: + +1. As a concatenation of two fixed-length big-endian integers, the so-called + :term:`raw encoding`. +2. As a concatenation of two fixed-length big-endian integers prefixed with + the type of the encoding, the so-called :term:`uncompressed` point + representation (the type is represented by a 0x04 byte). +3. As a fixed-length big-endian integer representing the ``x`` coordinate + prefixed with the byte representing the combined type of the encoding + and the sign of the ``y`` coordinate, the so-called :term:`compressed` + point representation (the type is then represented by a 0x02 or a 0x03 + byte). + +Interoperable file formats +========================== + +Now, while we can save the byte strings as-is and "remember" which curve +was used to generate those private and public keys, interoperability usually +requires also saving information about the curve together with the +corresponding key. Here too there are many ways to do it: +save the parameters of the used curve explicitly, use the name of the +well-known curve as a string, use a numerical identifier of the well-known +curve, etc. + +For public keys the most interoperable format is the one described +in RFC5912 (look for the SubjectPublicKeyInfo structure); a short sketch of +producing the point encodings listed above and this format is shown below. 
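A minimal sketch, assuming the ``encoding`` parameter of ``VerifyingKey.to_string()`` and the default ``VerifyingKey.to_pem()`` behaviour (named_curve SubjectPublicKeyInfo) available in recent releases of the library:

.. code:: python

    from ecdsa import SigningKey, NIST256p

    public_key = SigningKey.generate(curve=NIST256p).verifying_key

    # raw, uncompressed and compressed point encodings
    # (64, 65 and 33 bytes respectively for NIST P-256)
    raw = public_key.to_string("raw")
    uncompressed = public_key.to_string("uncompressed")
    compressed = public_key.to_string("compressed")

    # SubjectPublicKeyInfo structure (RFC 5912), PEM formatted,
    # identifying the curve with a named_curve OID
    pem = public_key.to_pem()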
+For private keys, the RFC5915 format (also known as the ssleay format) +and the PKCS#8 format (described in RFC5958) are the most popular. + +All three formats effectively support two ways of providing the information +about the curve used: by specifying the curve parameters explicitly or +by specifying the curve using an ASN.1 OBJECT IDENTIFIER (OID), which is +called ``named_curve``. ASN.1 OIDs are a hierarchical system of representing +types of objects; for example, the NIST P-256 curve is identified by the +1.2.840.10045.3.1.7 OID (in dotted-decimal formatting of the OID, also +known by the ``prime256v1`` OID node name or short name). Those OIDs +uniquely identify a particular curve, but the receiver needs to know +which numerical OID maps to which curve parameters. Thus the prospect of +using the explicit encoding, where all the needed parameters are provided, +is tempting; the downside is that curve parameters may specify a *weak* +curve, which is easy to attack and break (that is, to deduce the private key +from the public key). Verifying curve parameters is complex and computationally +expensive, so protocols generally use a few specific curves and require +all implementations to carry their parameters. As such, use of +``named_curve`` parameters is generally recommended. + +All of the mentioned formats specify a binary encoding, called DER. That +encoding uses bytes with all possible numerical values, which means it's not +possible to embed it directly in text files. For uses where it's useful to +limit bytes to printable characters, so that the keys can be embedded in text +files or text-only protocols (like email), the PEM formatting of the +DER-encoded data can be used. The PEM formatting is just a base64 encoding +with an appropriate header and footer. + +Signature formats +================= + +Finally, ECDSA signatures at the lowest level are a pair of numbers, usually +called ``r`` and ``s``. While they are the ``x`` coordinates of special +points on the curve, they are saved modulo the *order* of the curve, not +modulo the *prime* of the curve (as a coordinate needs to be). + +That again means we have multiple ways of encoding those two numbers. +The two most popular formats are to save them as a concatenation of big-endian +integers of fixed size (determined by the curve *order*) or as a DER +structure with two INTEGERs. +The first of those is called the :term:`raw encoding` inside the Python +ecdsa library. + +As the ASN.1 signature format requires the encoding of INTEGERs, and DER INTEGERs +must use the fewest possible number of bytes, a numerically small value of +``r`` or ``s`` will require fewer +bytes to represent in the DER structure. Thus, DER encoding isn't fixed +size for a given curve, but has a maximum possible size. + +.. note:: + + As a DER INTEGER uses the so-called two's complement representation of + numbers, the most significant bit of the most significant byte + represents the *sign* of the number. If that bit is set, then the + number is considered to be negative. Thus, to represent a number like + 255, which in binary representation is 0b11111111 (i.e. a byte with all + bits set high), the DER encoding of it will require two bytes: one + zero byte to make sure the sign bit is 0, and a byte with value 255 to + encode the numerical value of the integer. diff --git a/docs/source/conf.py b/docs/source/conf.py new file mode 100644 index 00000000..d86ea492 --- /dev/null +++ b/docs/source/conf.py @@ -0,0 +1,72 @@ +# Configuration file for the Sphinx documentation builder. 
+# +# This file only contains a selection of the most common options. For a full +# list see the documentation: +# https://www.sphinx-doc.org/en/master/usage/configuration.html + +# -- Path setup -------------------------------------------------------------- + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +# +import os +import sys + +sys.path.insert(0, os.path.abspath("../../src")) + + +# -- Project information ----------------------------------------------------- + +project = "python-ecdsa" +copyright = "2021, Brian Warner and Hubert Kario" +author = "Brian Warner and Hubert Kario" + +# The full version, including alpha/beta/rc tags +release = "0.17.0" + + +# -- General configuration --------------------------------------------------- + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.intersphinx", + "sphinx.ext.coverage", + "sphinx.ext.imgmath", + "sphinx.ext.viewcode", +] + +# Add any paths that contain templates here, relative to this directory. +templates_path = ["_templates"] + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +# This pattern also affects html_static_path and html_extra_path. +exclude_patterns = [] + +todo_include_todos = False + +# -- Options for HTML output ------------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +# +html_theme = "sphinx_rtd_theme" + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ["_static"] + +# Example configuration for intersphinx: refer to the Python standard library. +intersphinx_mapping = { + "python": ("https://docs.python.org/", None), +} + +autodoc_default_options = { + "undoc-members": True, + "inherited-members": True, +} diff --git a/docs/source/ec_arithmetic.rst b/docs/source/ec_arithmetic.rst new file mode 100644 index 00000000..39a13c97 --- /dev/null +++ b/docs/source/ec_arithmetic.rst @@ -0,0 +1,137 @@ +========================= +Elliptic Curve arithmetic +========================= + +The python-ecdsa library also provides a generic API for performing operations on +elliptic curve points. + +.. warning:: + + This is documentation of a very low-level API; if you want to + handle keys or signatures you should look at the documentation of + the :py:mod:`~ecdsa.keys` module. + +Short Weierstrass curves +======================== + +There are two low-level implementations for +:term:`short Weierstrass curves <short Weierstrass curve>`: +:py:class:`~ecdsa.ellipticcurve.Point` and +:py:class:`~ecdsa.ellipticcurve.PointJacobi`. + +Both of them use curves specified using the +:py:class:`~ecdsa.ellipticcurve.CurveFp` object. + +You can either provide your own curve parameters or use one of the predefined +curves. +For example, to define a curve :math:`y^2 = x^3 + 1 * x + 4 \text{ mod } 5` use +code like this: + +.. 
code:: python + + from ecdsa.ellipticcurve import CurveFp + custom_curve = CurveFp(5, 1, 4) + +The predefined curves are specified in the :py:mod:`~ecdsa.ecdsa` module, +but it's much easier to use the helper functions (and proper names) +from the :py:mod:`~ecdsa.curves` module. + +For example, to get the curve parameters for the NIST P-256 curve use this +code: + +.. code:: python + + from ecdsa.curves import NIST256p + curve = NIST256p.curve + +.. tip:: + + You can also use :py:class:`~ecdsa.curves.Curve` to get the curve + parameters from a PEM or DER file. You can also use + :py:func:`~ecdsa.curves.curve_by_name` to get a curve by specifying its + name. + Or use + :py:func:`~ecdsa.curves.find_curve` to get a curve by specifying its + ASN.1 object identifier (OID). + +Affine coordinates +------------------ + +Once you have the curve parameters you can create a point on the +curve. The :py:class:`~ecdsa.ellipticcurve.Point` uses affine coordinates, +i.e. the :math:`x` and :math:`y` from the curve equation directly. + +To specify a point (1, 1) on the ``custom_curve`` you can use this code: + +.. code:: python + + from ecdsa.ellipticcurve import Point + point_a = Point(custom_curve, 1, 1) + +Then it's possible to either perform scalar multiplication: + +.. code:: python + + point_b = point_a * 3 + +Or specify other points and perform addition: + +.. code:: python + + point_b = Point(custom_curve, 3, 2) + point_c = point_a + point_b + +To get the affine coordinates of the point, call the ``x()`` and ``y()`` +methods of the object: + +.. code:: python + + print("x: {0}, y: {1}".format(point_c.x(), point_c.y())) + +Projective coordinates +---------------------- + +When using the Jacobi coordinates, the point is defined by 3 integers, +which are related to the :math:`x` and :math:`y` in the following way: + +.. math:: + + x = X/Z^2 \\ + y = Y/Z^3 + +That means that if you have a point in affine coordinates, it's possible +to convert it to Jacobi coordinates by simply assuming :math:`Z = 1`. + +So the same points can be specified like so: + +.. code:: python + + from ecdsa.ellipticcurve import PointJacobi + point_a = PointJacobi(custom_curve, 1, 1, 1) + point_b = PointJacobi(custom_curve, 3, 2, 1) + + +.. note:: + + Unlike the :py:class:`~ecdsa.ellipticcurve.Point`, the + :py:class:`~ecdsa.ellipticcurve.PointJacobi` does **not** check if the + coordinates specify a valid point on the curve, as that operation is + computationally expensive for Jacobi coordinates. + If you want to verify whether they specify a valid + point, you need to convert the point to affine coordinates and use the + :py:meth:`~ecdsa.ellipticcurve.CurveFp.contains_point` method. + +Then all the operations work exactly the same as with the regular +:py:class:`~ecdsa.ellipticcurve.Point` implementation. +While it's not possible to get the internal :math:`X`, :math:`Y`, and :math:`Z` +coordinates, it's possible to get the affine projection just like with +the regular implementation: + +.. code:: python + + point_c = point_a + point_b + print("x: {0}, y: {1}".format(point_c.x(), point_c.y())) + +All the other operations, like scalar multiplication or point addition, work +on projective points the same as with the affine representation, but they +are much more efficient computationally. diff --git a/docs/source/ecdsa.curves.rst b/docs/source/ecdsa.curves.rst new file mode 100644 index 00000000..d2ff907b --- /dev/null +++ b/docs/source/ecdsa.curves.rst @@ -0,0 +1,7 @@ +ecdsa.curves module +=================== + +.. 
automodule:: ecdsa.curves + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/source/ecdsa.der.rst b/docs/source/ecdsa.der.rst new file mode 100644 index 00000000..28c6c592 --- /dev/null +++ b/docs/source/ecdsa.der.rst @@ -0,0 +1,7 @@ +ecdsa.der module +================ + +.. automodule:: ecdsa.der + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/source/ecdsa.ecdh.rst b/docs/source/ecdsa.ecdh.rst new file mode 100644 index 00000000..1d125f80 --- /dev/null +++ b/docs/source/ecdsa.ecdh.rst @@ -0,0 +1,7 @@ +ecdsa.ecdh module +================= + +.. automodule:: ecdsa.ecdh + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/source/ecdsa.ecdsa.rst b/docs/source/ecdsa.ecdsa.rst new file mode 100644 index 00000000..794dd11b --- /dev/null +++ b/docs/source/ecdsa.ecdsa.rst @@ -0,0 +1,7 @@ +ecdsa.ecdsa module +================== + +.. automodule:: ecdsa.ecdsa + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/source/ecdsa.eddsa.rst b/docs/source/ecdsa.eddsa.rst new file mode 100644 index 00000000..1b1c3f4e --- /dev/null +++ b/docs/source/ecdsa.eddsa.rst @@ -0,0 +1,7 @@ +ecdsa.eddsa module +================== + +.. automodule:: ecdsa.eddsa + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/source/ecdsa.ellipticcurve.rst b/docs/source/ecdsa.ellipticcurve.rst new file mode 100644 index 00000000..334dee5e --- /dev/null +++ b/docs/source/ecdsa.ellipticcurve.rst @@ -0,0 +1,7 @@ +ecdsa.ellipticcurve module +========================== + +.. automodule:: ecdsa.ellipticcurve + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/source/ecdsa.errors.rst b/docs/source/ecdsa.errors.rst new file mode 100644 index 00000000..839e560c --- /dev/null +++ b/docs/source/ecdsa.errors.rst @@ -0,0 +1,7 @@ +ecdsa.errors module +=================== + +.. automodule:: ecdsa.errors + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/source/ecdsa.keys.rst b/docs/source/ecdsa.keys.rst new file mode 100644 index 00000000..b470715a --- /dev/null +++ b/docs/source/ecdsa.keys.rst @@ -0,0 +1,7 @@ +ecdsa.keys module +================= + +.. automodule:: ecdsa.keys + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/source/ecdsa.numbertheory.rst b/docs/source/ecdsa.numbertheory.rst new file mode 100644 index 00000000..569b39bd --- /dev/null +++ b/docs/source/ecdsa.numbertheory.rst @@ -0,0 +1,7 @@ +ecdsa.numbertheory module +========================= + +.. automodule:: ecdsa.numbertheory + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/source/ecdsa.rfc6979.rst b/docs/source/ecdsa.rfc6979.rst new file mode 100644 index 00000000..3b891f52 --- /dev/null +++ b/docs/source/ecdsa.rfc6979.rst @@ -0,0 +1,7 @@ +ecdsa.rfc6979 module +==================== + +.. automodule:: ecdsa.rfc6979 + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/source/ecdsa.rst b/docs/source/ecdsa.rst new file mode 100644 index 00000000..00b49361 --- /dev/null +++ b/docs/source/ecdsa.rst @@ -0,0 +1,25 @@ +ecdsa package +============= + +.. automodule:: ecdsa + :members: + :undoc-members: + :show-inheritance: + +Submodules +---------- + +.. 
toctree:: + :maxdepth: 4 + + ecdsa.curves + ecdsa.der + ecdsa.ecdh + ecdsa.ecdsa + ecdsa.eddsa + ecdsa.ellipticcurve + ecdsa.errors + ecdsa.keys + ecdsa.numbertheory + ecdsa.rfc6979 + ecdsa.util diff --git a/docs/source/ecdsa.util.rst b/docs/source/ecdsa.util.rst new file mode 100644 index 00000000..9ea91d1d --- /dev/null +++ b/docs/source/ecdsa.util.rst @@ -0,0 +1,7 @@ +ecdsa.util module +================= + +.. automodule:: ecdsa.util + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/source/glossary.rst b/docs/source/glossary.rst new file mode 100644 index 00000000..4aebb532 --- /dev/null +++ b/docs/source/glossary.rst @@ -0,0 +1,92 @@ +.. _glossary: + +Glossary +======== + +.. glossary:: + :sorted: + + ECC + Elliptic Curve Cryptography, a term for all the different ways of using + elliptic curves in cryptography. Also a combined term for :term:`ECDSA`, + :term:`EdDSA`, :term:`ECDH`. + + ECDSA + Elliptic Curve Digital Signature Algorithm + + EdDSA + Edwards-curve based Digital Signature Algorithm, the alternative + digital signature algorithm that's used for Curve25519 or Curve448 + + ECDH + Elliptic Curve Diffie-Hellman + + raw encoding + Conversion of public keys, private keys and signatures (which in + a mathematical sense are integers or pairs of integers) to strings of + bytes that does not use any special tags or encoding rules. + For any given curve, all keys of the same type or signatures will be + encoded to byte strings of the same length. In a more formal sense, + the integers are encoded as big-endian, constant length byte strings, + where the string length is determined by the curve order (e.g. + for NIST256p the order is 256 bits long, so the private key will be 32 + bytes long while the public key will be 64 bytes long). The encoding of a + single integer is zero-padded on the left if the numerical value is + low. In the case of public keys and signatures, which are comprised of two + integers, the integers are simply concatenated. + + uncompressed + The most common formatting specified in PKIX standards. Specified in + the X9.62 and SEC1 standards. The only difference between it and + :term:`raw encoding` is the prepending of a 0x04 byte. Thus an + uncompressed NIST256p public key encoding will be 65 bytes long. + + compressed + The public point representation that uses half the bytes of the + :term:`uncompressed` encoding (rounded up). It uses the first byte of + the encoding to specify the sign of the y coordinate and encodes the + x coordinate as-is. The first byte of the encoding is equal to + 0x02 or 0x03. Compressed encoding of a NIST256p public key will be 33 + bytes long. + + hybrid + A combination of :term:`uncompressed` and :term:`compressed` encodings. + Both x and y coordinates are stored just as in :term:`uncompressed` + encoding, but the first byte reflects the sign of the y coordinate. The + first byte of the encoding will be equal to 0x06 or 0x07. Hybrid + encoding of a NIST256p public key will be 65 bytes long. + + PEM + The acronym stands for Privacy Enhanced Mail, but currently it is used + primarily as the way to encode :term:`DER` objects into text that can + be either easily copy-pasted or transferred over email. + It uses headers like ``-----BEGIN -----`` and footers + like ``-----END -----`` to separate multiple + types of objects in the same file or the object from the surrounding + comments. The actual object stored is base64 encoded. 
+ + DER + Distinguished Encoding Rules, the way to encode :term:`ASN.1` objects + deterministically and uniquely into byte strings. + + ASN.1 + Abstract Syntax Notation 1 is a standard description language for + specifying serialisation and deserialisation of data structures in a + portable and cross-platform way. + + bytes-like object + All the types that implement the buffer protocol. That includes + ``str`` (only on Python 2), ``bytes``, ``bytearray``, ``array.array`` + and ``memoryview`` of those objects. + Please note that ``array.array`` serialisation (converting it to a byte + string) is endianness dependent! A signature computed over an ``array.array`` + of integers on a big-endian system will not verify on a + little-endian system and vice-versa. + + set-like object + All the types that support the ``in`` operator, like ``list``, + ``tuple``, ``set``, ``frozenset``, etc. + + short Weierstrass curve + A curve with the curve equation :math:`y^2=x^3+ax+b`. Most popular + curves use an equation of this form (e.g. NIST256p). diff --git a/docs/source/index.rst b/docs/source/index.rst new file mode 100644 index 00000000..112d5f97 --- /dev/null +++ b/docs/source/index.rst @@ -0,0 +1,61 @@ +.. python-ecdsa documentation master file, created by + sphinx-quickstart on Sat May 29 18:34:49 2021. + You can adapt this file completely to your liking, but it should at least + contain the root `toctree` directive. + +Welcome to python-ecdsa's documentation! +======================================== + +``ecdsa`` implements +`elliptic-curve cryptography (ECC) `_, +more specifically the +`Elliptic Curve Digital Signature Algorithm (ECDSA) `_, +`Edwards-curve Digital Signature Algorithm (EdDSA) `_ +and the +`Elliptic Curve Diffie-Hellman (ECDH) `_ +algorithms. +All of those algorithms are used in many protocols in practice, like +in +`TLS `_ +or +`SSH `_. + +This library provides key generation, signing, verifying, and shared secret +derivation for five +popular NIST "Suite B" GF(p) (*prime field*) curves, with key lengths of 192, +224, 256, 384, and 521 bits. The "short names" for these curves, as known by +the OpenSSL tool (``openssl ecparam -list_curves``), are: ``prime192v1``, +``secp224r1``, ``prime256v1``, ``secp384r1``, and ``secp521r1``. It includes +the +256-bit curve ``secp256k1`` used by Bitcoin. There is also support for the +regular (non-twisted) variants of Brainpool curves from 160 to 512 bits. The +"short names" of those curves are: ``brainpoolP160r1``, ``brainpoolP192r1``, +``brainpoolP224r1``, ``brainpoolP256r1``, ``brainpoolP320r1``, +``brainpoolP384r1``, +``brainpoolP512r1``. A few of the small curves from the SEC standard are also +included (mainly to speed up testing of the library); those are: +``secp112r1``, ``secp112r2``, ``secp128r1``, and ``secp160r1``. +Key generation, signing and verifying are also supported for the Ed25519 and Ed448 +curves. +No other curves are included, but it is not too hard to add support for more +curves over prime fields. + +.. toctree:: + :maxdepth: 2 + :caption: Contents: + :hidden: + + quickstart + basics + ec_arithmetic + glossary + modules + + +Indices and tables +================== + +* :ref:`genindex` +* :ref:`modindex` +* :ref:`glossary` +* :ref:`search` diff --git a/docs/source/modules.rst b/docs/source/modules.rst new file mode 100644 index 00000000..f5c495b8 --- /dev/null +++ b/docs/source/modules.rst @@ -0,0 +1,7 @@ +python-ecdsa API +================ + +.. 
toctree:: + :maxdepth: 4 + + ecdsa diff --git a/docs/source/quickstart.rst b/docs/source/quickstart.rst new file mode 100644 index 00000000..e83a6a6f --- /dev/null +++ b/docs/source/quickstart.rst @@ -0,0 +1,178 @@ +=============== +Getting started +=============== + +The library has just one mandatory dependency: ``six``. +If you install ``python-ecdsa`` through pip, it should automatically +install ``six`` too. + +To install it you can run the following command: + +.. code:: bash + + pip install ecdsa + +The high-level API provided by the library is primarily in the +:py:mod:`~ecdsa.keys` module. +There you will find the :py:class:`~ecdsa.keys.SigningKey` (the class +that enables handling of the private keys) and the +:py:class:`~ecdsa.keys.VerifyingKey` (the class that enables handling of +the public keys). + +To handle shared key derivation, the :py:class:`~ecdsa.ecdh.ECDH` class +is used. + +Finally, in case use of custom elliptic curves is necessary, the +:py:class:`~ecdsa.curves.Curve` class may be needed. + +Key generation +============== + +To generate a key, import the :py:class:`~ecdsa.keys.SigningKey` and +call the :py:func:`~ecdsa.keys.SigningKey.generate` function on it: + +.. code:: python + + from ecdsa.keys import SigningKey + + key = SigningKey.generate() + +By default, that will create a key that uses the NIST P-192 curve. To +select a more secure curve, like NIST P-256, import it from the +:py:mod:`ecdsa.curves` or from the :py:mod:`ecdsa` module: + +.. code:: python + + from ecdsa import SigningKey, NIST256p + + key = SigningKey.generate(curve=NIST256p) + +Private key storage and retrieval +================================= + +To store a key as a string or in a file, you can serialise it using many formats; +in general we recommend the PKCS#8 PEM encoding. + +If you have a :py:class:`~ecdsa.keys.SigningKey` object in ``key`` and +want to save it to a file like ``priv_key.pem``, you can run the following +code: + +.. code:: python + + with open("priv_key.pem", "wb") as f: + f.write(key.to_pem(format="pkcs8")) + +.. warning:: + + Not specifying ``format="pkcs8"`` will create a file that uses the legacy + ``ssleay`` file format, which is most commonly used by applications + that use OpenSSL, as that was originally the only format supported by it. + OpenSSL has supported the PKCS#8 format too for a long time now, though. + +To read that file back, you can run code like this: + +.. code:: python + + from ecdsa import SigningKey + + with open("priv_key.pem") as f: + key = SigningKey.from_pem(f.read()) + +.. tip:: + + As the format is self-describing, the parser will automatically detect + if the provided file is in the ``ssleay`` or the ``pkcs8`` format + and process it accordingly. + +Public key derivation +===================== + +To get the public key associated with the given private key, either +call the :py:func:`~ecdsa.keys.SigningKey.get_verifying_key` method or +access the ``verifying_key`` attribute in +:py:class:`~ecdsa.keys.SigningKey` directly: + +.. code:: python + + from ecdsa import SigningKey, NIST256p + + private_key = SigningKey.generate(curve=NIST256p) + + public_key = private_key.verifying_key + +Public key storage and retrieval +================================ + +Similarly to private keys, public keys can be stored in files: + +.. code:: python + + from ecdsa import SigningKey + + private_key = SigningKey.generate() + + public_key = private_key.verifying_key + + with open("pub_key.pem", "wb") as f: + f.write(public_key.to_pem()) + +And read from files: + +.. 
code:: python + + from ecdsa import VerifyingKey + + with open("pub_key.pem") as f: + public_key = VerifyingKey.from_pem(f.read()) + +Signing +======= + +To sign a byte string stored in variable ``message`` using SigningKey in +``private_key``, SHA-256, get a signature in the DER format and save it to a +file, you can use the following code: + +.. code:: python + + from hashlib import sha256 + from ecdsa.util import sigencode_der + + sig = private_key.sign_deterministic( + message, + hashfunc=sha256, + sigencode=sigencode_der + ) + + with open("message.sig", "wb") as f: + f.write(sig) + +.. note:: + + As cryptographic hashes (SHA-256, SHA3-256, etc.) operate on *bytes* not + text strings, any text needs to be serialised into *bytes* before it can + be signed. This is because encoding of string "text" results in very + different bytes when it's encoded using UTF-8 and when it's encoded using + UCS-2. + +Verifying +========= + +To verify a signature of a byte string in ``message`` using a VerifyingKey +in ``public_key``, SHA-256 and a DER signature in a ``message.sig`` file, +you can use the following code: + +.. code:: python + + from hashlib import sha256 + from ecdsa import BadSignatureError + from ecdsa.util import sigdecode_der + + with open("message.sig", "rb") as f: + sig = f.read() + + try: + ret = public_key.verify(sig, message, sha256, sigdecode=sigdecode_der) + assert ret + print("Valid signature") + except BadSignatureError: + print("Incorrect signature") diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 00000000..ffe2fce4 --- /dev/null +++ b/requirements.txt @@ -0,0 +1 @@ +six diff --git a/setup.py b/setup.py index f0ef9a6c..618f5e6d 100755 --- a/setup.py +++ b/setup.py @@ -22,24 +22,27 @@ long_description_content_type="text/markdown", author="Brian Warner", author_email="warner@lothar.com", - url="http://github.com/warner/python-ecdsa", + url="http://github.com/tlsfuzzer/python-ecdsa", packages=["ecdsa"], package_dir={"": "src"}, license="MIT", cmdclass=commands, - python_requires=">=2.6, !=3.0.*, !=3.1.*, !=3.2.*", + python_requires=">=2.6, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, " + "!=3.5.*", classifiers=[ "Programming Language :: Python", "Programming Language :: Python :: 2", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.3", - "Programming Language :: Python :: 3.4", - "Programming Language :: Python :: 3.5", "Programming Language :: Python :: 3.6", "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", ], install_requires=["six>=1.9.0"], extras_require={"gmpy2": "gmpy2", "gmpy": "gmpy"}, diff --git a/speed.py b/speed.py index 19132e0d..a3f9e59c 100644 --- a/speed.py +++ b/speed.py @@ -1,4 +1,3 @@ -import six import timeit from ecdsa.curves import curves @@ -8,7 +7,7 @@ def do(setup_statements, statement): t = timeit.Timer(stmt=statement, setup="\n".join(setup_statements)) # determine number so that 0.2 <= total time < 2.0 for i in range(1, 10): - number = 10 ** i + number = 10**i x = t.timeit(number) if x >= 0.2: break @@ -19,7 +18,8 @@ def do(setup_statements, statement): "{name:>16}{sep:1} {siglen:>6} {keygen:>9{form}}{unit:1} " "{keygen_inv:>9{form_inv}} {sign:>9{form}}{unit:1} " 
"{sign_inv:>9{form_inv}} {verify:>9{form}}{unit:1} " - "{verify_inv:>9{form_inv}}" + "{verify_inv:>9{form_inv}} {verify_single:>13{form}}{unit:1} " + "{verify_single_inv:>14{form_inv}}" ) print( @@ -31,6 +31,8 @@ def do(setup_statements, statement): sign_inv="sign/s", verify="verify", verify_inv="verify/s", + verify_single="no PC verify", + verify_single_inv="no PC verify/s", name="", sep="", unit="", @@ -40,9 +42,9 @@ def do(setup_statements, statement): ) for curve in [i.name for i in curves]: - S1 = "import six; from ecdsa import SigningKey, %s" % curve + S1 = "from ecdsa import SigningKey, %s" % curve S2 = "sk = SigningKey.generate(%s)" % curve - S3 = "msg = six.b('msg')" + S3 = "msg = b'msg'" S4 = "sig = sk.sign(msg)" S5 = "vk = sk.get_verifying_key()" S6 = "vk.precompute()" @@ -54,10 +56,11 @@ def do(setup_statements, statement): keygen = do([S1], S2) sign = do([S1, S2, S3], S4) verf = do([S1, S2, S3, S4, S5, S6], S7) + verf_single = do([S1, S2, S3, S4, S5], S7) import ecdsa c = getattr(ecdsa, curve) - sig = ecdsa.SigningKey.generate(c).sign(six.b("msg")) + sig = ecdsa.SigningKey.generate(c).sign(b"msg") print( prnt_form.format( name=curve, @@ -70,6 +73,8 @@ def do(setup_statements, statement): sign_inv=1.0 / sign, verify=verf, verify_inv=1.0 / verf, + verify_single=verf_single, + verify_single_inv=1.0 / verf_single, form=".5f", form_inv=".2f", ) @@ -92,6 +97,8 @@ def do(setup_statements, statement): ) for curve in [i.name for i in curves]: + if curve == "Ed25519" or curve == "Ed448": + continue S1 = "from ecdsa import SigningKey, ECDH, {0}".format(curve) S2 = "our = SigningKey.generate({0})".format(curve) S3 = "remote = SigningKey.generate({0}).verifying_key".format(curve) diff --git a/sql/combine.sql b/sql/combine.sql new file mode 100644 index 00000000..d2a4490d --- /dev/null +++ b/sql/combine.sql @@ -0,0 +1,7 @@ +attach 'session-to_merge.sqlite' as toMerge; +BEGIN; + insert into work_items select * from toMerge.work_items; + insert into mutation_specs select * from toMerge.mutation_specs; + insert into work_results select * from toMerge.work_results; +COMMIT; +detach toMerge; diff --git a/sql/create_to_del.sql b/sql/create_to_del.sql new file mode 100644 index 00000000..956d1a28 --- /dev/null +++ b/sql/create_to_del.sql @@ -0,0 +1,2 @@ +create table to_del (job_id VARCHAR NOT NULL, id INTEGER PRIMARY KEY); +insert into to_del select *, ROWID from work_items; diff --git a/sql/shard-db.sql b/sql/shard-db.sql new file mode 100644 index 00000000..7460bd31 --- /dev/null +++ b/sql/shard-db.sql @@ -0,0 +1,3 @@ +delete from mutation_specs where job_id in (select job_id from to_del where to_del.ID % 20 != %SHARD%); +delete from work_items where job_id in (select job_id from to_del where to_del.ID % 20 != %SHARD%); +drop table to_del; diff --git a/src/ecdsa/__init__.py b/src/ecdsa/__init__.py index 4ae0a114..342538e0 100644 --- a/src/ecdsa/__init__.py +++ b/src/ecdsa/__init__.py @@ -1,3 +1,6 @@ +# while we don't use six in this file, we did bundle it for a long time, so +# keep as part of module in a virtual way (through __all__) +import six from .keys import ( SigningKey, VerifyingKey, @@ -19,6 +22,19 @@ BRAINPOOLP320r1, BRAINPOOLP384r1, BRAINPOOLP512r1, + SECP112r1, + SECP112r2, + SECP128r1, + SECP160r1, + Ed25519, + Ed448, + BRAINPOOLP160t1, + BRAINPOOLP192t1, + BRAINPOOLP224t1, + BRAINPOOLP256t1, + BRAINPOOLP320t1, + BRAINPOOLP384t1, + BRAINPOOLP512t1, ) from .ecdh import ( ECDH, @@ -28,13 +44,9 @@ InvalidSharedSecretError, ) from .der import UnexpectedDER +from . 
import _version -# This code comes from http://github.com/warner/python-ecdsa -from ._version import get_versions - -__version__ = get_versions()["version"] -del get_versions - +# This code comes from http://github.com/tlsfuzzer/python-ecdsa __all__ = [ "curves", "der", @@ -72,5 +84,21 @@ BRAINPOOLP320r1, BRAINPOOLP384r1, BRAINPOOLP512r1, + SECP112r1, + SECP112r2, + SECP128r1, + SECP160r1, + Ed25519, + Ed448, + six.b(""), + BRAINPOOLP160t1, + BRAINPOOLP192t1, + BRAINPOOLP224t1, + BRAINPOOLP256t1, + BRAINPOOLP320t1, + BRAINPOOLP384t1, + BRAINPOOLP512t1, ] del _hush_pyflakes + +__version__ = _version.get_versions()["version"] diff --git a/src/ecdsa/_compat.py b/src/ecdsa/_compat.py index 720360bc..4558e33a 100644 --- a/src/ecdsa/_compat.py +++ b/src/ecdsa/_compat.py @@ -3,6 +3,7 @@ """ import sys import re +import binascii from six import integer_types @@ -14,7 +15,8 @@ def str_idx_as_int(string, index): return ord(val) -if sys.version_info < (3, 0): +if sys.version_info < (3, 0): # pragma: no branch + import platform def normalise_bytes(buffer_object): """Cast the input into array of bytes.""" @@ -24,37 +26,113 @@ def normalise_bytes(buffer_object): def hmac_compat(ret): return ret - if sys.version_info < (2, 7) or sys.version_info < (2, 7, 4): + if ( + sys.version_info < (2, 7) + or sys.version_info < (2, 7, 4) + or platform.system() == "Java" + ): # pragma: no branch def remove_whitespace(text): """Removes all whitespace from passed in string""" return re.sub(r"\s+", "", text) + def compat26_str(val): + return str(val) + + def bit_length(val): + if val == 0: + return 0 + return len(bin(val)) - 2 + else: def remove_whitespace(text): """Removes all whitespace from passed in string""" return re.sub(r"\s+", "", text, flags=re.UNICODE) + def compat26_str(val): + return val + + def bit_length(val): + """Return number of bits necessary to represent an integer.""" + return val.bit_length() + + def b2a_hex(val): + return binascii.b2a_hex(compat26_str(val)) + + def a2b_hex(val): + try: + return bytearray(binascii.a2b_hex(val)) + except Exception as e: + raise ValueError("base16 error: %s" % e) + + def bytes_to_int(val, byteorder): + """Convert bytes to an int.""" + if not val: + return 0 + if byteorder == "big": + return int(b2a_hex(val), 16) + if byteorder == "little": + return int(b2a_hex(val[::-1]), 16) + raise ValueError("Only 'big' and 'little' endian supported") + + def int_to_bytes(val, length=None, byteorder="big"): + """Return number converted to bytes""" + if length is None: + length = byte_length(val) + if byteorder == "big": + return bytearray( + (val >> i) & 0xFF for i in reversed(range(0, length * 8, 8)) + ) + if byteorder == "little": + return bytearray( + (val >> i) & 0xFF for i in range(0, length * 8, 8) + ) + raise ValueError("Only 'big' or 'little' endian supported") else: - if sys.version_info < (3, 4): - # on python 3.3 hmac.hmac.update() accepts only bytes, on newer - # versions it does accept memoryview() also - def hmac_compat(data): - if not isinstance(data, bytes): - return bytes(data) - return data - - else: - def hmac_compat(data): - return data + def hmac_compat(data): + return data def normalise_bytes(buffer_object): """Cast the input into array of bytes.""" return memoryview(buffer_object).cast("B") + def compat26_str(val): + return val + def remove_whitespace(text): """Removes all whitespace from passed in string""" return re.sub(r"\s+", "", text, flags=re.UNICODE) + + def a2b_hex(val): + try: + return bytearray(binascii.a2b_hex(bytearray(val, "ascii"))) + except 
Exception as e: + raise ValueError("base16 error: %s" % e) + + # pylint: disable=invalid-name + # pylint is stupid here and doesn't notice it's a function, not + # constant + bytes_to_int = int.from_bytes + # pylint: enable=invalid-name + + def bit_length(val): + """Return number of bits necessary to represent an integer.""" + return val.bit_length() + + def int_to_bytes(val, length=None, byteorder="big"): + """Convert integer to bytes.""" + if length is None: + length = byte_length(val) + # for gmpy we need to convert back to native int + if not isinstance(val, int): + val = int(val) + return bytearray(val.to_bytes(length=length, byteorder=byteorder)) + + +def byte_length(val): + """Return number of bytes necessary to represent an integer.""" + length = bit_length(val) + return (length + 7) // 8 diff --git a/src/ecdsa/_rwlock.py b/src/ecdsa/_rwlock.py deleted file mode 100644 index 010e4981..00000000 --- a/src/ecdsa/_rwlock.py +++ /dev/null @@ -1,86 +0,0 @@ -# Copyright Mateusz Kobos, (c) 2011 -# https://code.activestate.com/recipes/577803-reader-writer-lock-with-priority-for-writers/ -# released under the MIT licence - -import threading - - -__author__ = "Mateusz Kobos" - - -class RWLock: - """ - Read-Write locking primitive - - Synchronization object used in a solution of so-called second - readers-writers problem. In this problem, many readers can simultaneously - access a share, and a writer has an exclusive access to this share. - Additionally, the following constraints should be met: - 1) no reader should be kept waiting if the share is currently opened for - reading unless a writer is also waiting for the share, - 2) no writer should be kept waiting for the share longer than absolutely - necessary. - - The implementation is based on [1, secs. 4.2.2, 4.2.6, 4.2.7] - with a modification -- adding an additional lock (C{self.__readers_queue}) - -- in accordance with [2]. - - Sources: - [1] A.B. Downey: "The little book of semaphores", Version 2.1.5, 2008 - [2] P.J. Courtois, F. Heymans, D.L. Parnas: - "Concurrent Control with 'Readers' and 'Writers'", - Communications of the ACM, 1971 (via [3]) - [3] http://en.wikipedia.org/wiki/Readers-writers_problem - """ - - def __init__(self): - """ - A lock giving an even higher priority to the writer in certain - cases (see [2] for a discussion). - """ - self.__read_switch = _LightSwitch() - self.__write_switch = _LightSwitch() - self.__no_readers = threading.Lock() - self.__no_writers = threading.Lock() - self.__readers_queue = threading.Lock() - - def reader_acquire(self): - self.__readers_queue.acquire() - self.__no_readers.acquire() - self.__read_switch.acquire(self.__no_writers) - self.__no_readers.release() - self.__readers_queue.release() - - def reader_release(self): - self.__read_switch.release(self.__no_writers) - - def writer_acquire(self): - self.__write_switch.acquire(self.__no_readers) - self.__no_writers.acquire() - - def writer_release(self): - self.__no_writers.release() - self.__write_switch.release(self.__no_readers) - - -class _LightSwitch: - """An auxiliary "light switch"-like object. The first thread turns on the - "switch", the last one turns it off (see [1, sec. 
4.2.2] for details).""" - - def __init__(self): - self.__counter = 0 - self.__mutex = threading.Lock() - - def acquire(self, lock): - self.__mutex.acquire() - self.__counter += 1 - if self.__counter == 1: - lock.acquire() - self.__mutex.release() - - def release(self, lock): - self.__mutex.acquire() - self.__counter -= 1 - if self.__counter == 0: - lock.release() - self.__mutex.release() diff --git a/src/ecdsa/_sha3.py b/src/ecdsa/_sha3.py new file mode 100644 index 00000000..2db00586 --- /dev/null +++ b/src/ecdsa/_sha3.py @@ -0,0 +1,181 @@ +""" +Implementation of the SHAKE-256 algorithm for Ed448 +""" + +try: + import hashlib + + hashlib.new("shake256").digest(64) + + def shake_256(msg, outlen): + return hashlib.new("shake256", msg).digest(outlen) + +except (TypeError, ValueError): + + from ._compat import bytes_to_int, int_to_bytes + + # From little endian. + def _from_le(s): + return bytes_to_int(s, byteorder="little") + + # Rotate a word x by b places to the left. + def _rol(x, b): + return ((x << b) | (x >> (64 - b))) & (2**64 - 1) + + # Do the SHA-3 state transform on state s. + def _sha3_transform(s): + ROTATIONS = [ + 0, + 1, + 62, + 28, + 27, + 36, + 44, + 6, + 55, + 20, + 3, + 10, + 43, + 25, + 39, + 41, + 45, + 15, + 21, + 8, + 18, + 2, + 61, + 56, + 14, + ] + PERMUTATION = [ + 1, + 6, + 9, + 22, + 14, + 20, + 2, + 12, + 13, + 19, + 23, + 15, + 4, + 24, + 21, + 8, + 16, + 5, + 3, + 18, + 17, + 11, + 7, + 10, + ] + RC = [ + 0x0000000000000001, + 0x0000000000008082, + 0x800000000000808A, + 0x8000000080008000, + 0x000000000000808B, + 0x0000000080000001, + 0x8000000080008081, + 0x8000000000008009, + 0x000000000000008A, + 0x0000000000000088, + 0x0000000080008009, + 0x000000008000000A, + 0x000000008000808B, + 0x800000000000008B, + 0x8000000000008089, + 0x8000000000008003, + 0x8000000000008002, + 0x8000000000000080, + 0x000000000000800A, + 0x800000008000000A, + 0x8000000080008081, + 0x8000000000008080, + 0x0000000080000001, + 0x8000000080008008, + ] + + for rnd in range(0, 24): + # AddColumnParity (Theta) + c = [0] * 5 + d = [0] * 5 + for i in range(0, 25): + c[i % 5] ^= s[i] + for i in range(0, 5): + d[i] = c[(i + 4) % 5] ^ _rol(c[(i + 1) % 5], 1) + for i in range(0, 25): + s[i] ^= d[i % 5] + # RotateWords (Rho) + for i in range(0, 25): + s[i] = _rol(s[i], ROTATIONS[i]) + # PermuteWords (Pi) + t = s[PERMUTATION[0]] + for i in range(0, len(PERMUTATION) - 1): + s[PERMUTATION[i]] = s[PERMUTATION[i + 1]] + s[PERMUTATION[-1]] = t + # NonlinearMixRows (Chi) + for i in range(0, 25, 5): + t = [ + s[i], + s[i + 1], + s[i + 2], + s[i + 3], + s[i + 4], + s[i], + s[i + 1], + ] + for j in range(0, 5): + s[i + j] = t[j] ^ ((~t[j + 1]) & (t[j + 2])) + # AddRoundConstant (Iota) + s[0] ^= RC[rnd] + + # Reinterpret octet array b to word array and XOR it to state s. + def _reinterpret_to_words_and_xor(s, b): + for j in range(0, len(b) // 8): + s[j] ^= _from_le(b[8 * j : 8 * j + 8]) + + # Reinterpret word array w to octet array and return it. + def _reinterpret_to_octets(w): + mp = bytearray() + for j in range(0, len(w)): + mp += int_to_bytes(w[j], 8, byteorder="little") + return mp + + def _sha3_raw(msg, r_w, o_p, e_b): + """Semi-generic SHA-3 implementation""" + r_b = 8 * r_w + s = [0] * 25 + # Handle whole blocks. + idx = 0 + blocks = len(msg) // r_b + for i in range(0, blocks): + _reinterpret_to_words_and_xor(s, msg[idx : idx + r_b]) + idx += r_b + _sha3_transform(s) + # Handle last block padding. 
+ m = bytearray(msg[idx:]) + m.append(o_p) + while len(m) < r_b: + m.append(0) + m[len(m) - 1] |= 128 + # Handle padded last block. + _reinterpret_to_words_and_xor(s, m) + _sha3_transform(s) + # Output. + out = bytearray() + while len(out) < e_b: + out += _reinterpret_to_octets(s[:r_w]) + _sha3_transform(s) + return out[:e_b] + + def shake_256(msg, outlen): + return _sha3_raw(msg, 17, 31, outlen) diff --git a/src/ecdsa/_version.py b/src/ecdsa/_version.py index a72288a9..8c4354fd 100644 --- a/src/ecdsa/_version.py +++ b/src/ecdsa/_version.py @@ -5,7 +5,7 @@ # that just contains the computed version number. # This file is released into the public domain. Generated by -# versioneer-0.17 (https://github.com/warner/python-versioneer) +# versioneer-0.21 (https://github.com/python-versioneer/python-versioneer) """Git implementation of _version.py.""" @@ -56,7 +56,7 @@ class NotThisMethod(Exception): def register_vcs_handler(vcs, method): # decorator - """Decorator to mark a method as the handler for a particular VCS.""" + """Create decorator to mark a method as the handler of a VCS.""" def decorate(f): """Store f in HANDLERS[vcs][method].""" @@ -73,20 +73,20 @@ def run_command( ): """Call the given command(s).""" assert isinstance(commands, list) - p = None - for c in commands: + process = None + for command in commands: try: - dispcmd = str([c] + args) + dispcmd = str([command] + args) # remember shell=False, so use git.cmd on windows, not just git - p = subprocess.Popen( - [c] + args, + process = subprocess.Popen( + [command] + args, cwd=cwd, env=env, stdout=subprocess.PIPE, stderr=(subprocess.PIPE if hide_stderr else None), ) break - except EnvironmentError: + except OSError: e = sys.exc_info()[1] if e.errno == errno.ENOENT: continue @@ -98,15 +98,15 @@ def run_command( if verbose: print("unable to find command, tried %s" % (commands,)) return None, None - stdout = p.communicate()[0].strip() + stdout = process.communicate()[0].strip() if sys.version_info[0] >= 3: stdout = stdout.decode() - if p.returncode != 0: + if process.returncode != 0: if verbose: print("unable to run %s (error)" % dispcmd) print("stdout was %s" % stdout) - return None, p.returncode - return stdout, p.returncode + return None, process.returncode + return stdout, process.returncode def versions_from_parentdir(parentdir_prefix, root, verbose): @@ -118,7 +118,7 @@ def versions_from_parentdir(parentdir_prefix, root, verbose): """ rootdirs = [] - for i in range(3): + for _ in range(3): dirname = os.path.basename(root) if dirname.startswith(parentdir_prefix): return { @@ -128,9 +128,8 @@ def versions_from_parentdir(parentdir_prefix, root, verbose): "error": None, "date": None, } - else: - rootdirs.append(root) - root = os.path.dirname(root) # up a level + rootdirs.append(root) + root = os.path.dirname(root) # up a level if verbose: print( @@ -149,22 +148,21 @@ def git_get_keywords(versionfile_abs): # _version.py. 
keywords = {} try: - f = open(versionfile_abs, "r") - for line in f.readlines(): - if line.strip().startswith("git_refnames ="): - mo = re.search(r'=\s*"(.*)"', line) - if mo: - keywords["refnames"] = mo.group(1) - if line.strip().startswith("git_full ="): - mo = re.search(r'=\s*"(.*)"', line) - if mo: - keywords["full"] = mo.group(1) - if line.strip().startswith("git_date ="): - mo = re.search(r'=\s*"(.*)"', line) - if mo: - keywords["date"] = mo.group(1) - f.close() - except EnvironmentError: + with open(versionfile_abs, "r") as fobj: + for line in fobj: + if line.strip().startswith("git_refnames ="): + mo = re.search(r'=\s*"(.*)"', line) + if mo: + keywords["refnames"] = mo.group(1) + if line.strip().startswith("git_full ="): + mo = re.search(r'=\s*"(.*)"', line) + if mo: + keywords["full"] = mo.group(1) + if line.strip().startswith("git_date ="): + mo = re.search(r'=\s*"(.*)"', line) + if mo: + keywords["date"] = mo.group(1) + except OSError: pass return keywords @@ -172,10 +170,14 @@ def git_get_keywords(versionfile_abs): @register_vcs_handler("git", "keywords") def git_versions_from_keywords(keywords, tag_prefix, verbose): """Get version information from git keywords.""" - if not keywords: - raise NotThisMethod("no keywords at all, weird") + if "refnames" not in keywords: + raise NotThisMethod("Short version file found") date = keywords.get("date") if date is not None: + # Use only the last line. Previous lines may contain GPG signature + # information. + date = date.splitlines()[-1] + # git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant # datestamp. However we prefer "%ci" (which expands to an "ISO-8601 # -like" string, which we must then edit to make compliant), because @@ -188,11 +190,11 @@ def git_versions_from_keywords(keywords, tag_prefix, verbose): if verbose: print("keywords are unexpanded, not using") raise NotThisMethod("unexpanded keywords, not a git-archive tarball") - refs = set([r.strip() for r in refnames.strip("()").split(",")]) + refs = set(r.strip() for r in refnames.strip("()").split(",")) # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of # just "foo-1.0". If we see a "tag: " prefix, prefer those. TAG = "tag: " - tags = set([r[len(TAG) :] for r in refs if r.startswith(TAG)]) + tags = set(r[len(TAG) :] for r in refs if r.startswith(TAG)) if not tags: # Either we're using git < 1.8.3, or there really are no tags. We use # a heuristic: assume all version tags have a digit. The old git %d @@ -201,7 +203,7 @@ def git_versions_from_keywords(keywords, tag_prefix, verbose): # between branches and tags. By ignoring refnames without digits, we # filter out many common branch names like "release" and # "stabilization", as well as "HEAD" and "master". - tags = set([r for r in refs if re.search(r"\d", r)]) + tags = set(r for r in refs if re.search(r"\d", r)) if verbose: print("discarding '%s', no digits" % ",".join(refs - tags)) if verbose: @@ -210,6 +212,11 @@ def git_versions_from_keywords(keywords, tag_prefix, verbose): # sorting will prefer e.g. 
"2.0" over "2.0rc1" if ref.startswith(tag_prefix): r = ref[len(tag_prefix) :] + # Filter out refs that exactly match prefix or that don't start + # with a number once the prefix is stripped (mostly a concern + # when prefix is '') + if not re.match(r"\d", r): + continue if verbose: print("picking %s" % r) return { @@ -232,7 +239,7 @@ def git_versions_from_keywords(keywords, tag_prefix, verbose): @register_vcs_handler("git", "pieces_from_vcs") -def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): +def git_pieces_from_vcs(tag_prefix, root, verbose, runner=run_command): """Get version from 'git describe' in the root of the source tree. This only gets called if the git-archive 'subst' keywords were *not* @@ -240,10 +247,12 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): version string, meaning we're inside a checked out source tree. """ GITS = ["git"] + TAG_PREFIX_REGEX = "*" if sys.platform == "win32": GITS = ["git.cmd", "git.exe"] + TAG_PREFIX_REGEX = r"\*" - out, rc = run_command( + _, rc = runner( GITS, ["rev-parse", "--git-dir"], cwd=root, hide_stderr=True ) if rc != 0: @@ -253,7 +262,7 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty] # if there isn't one, this yields HEX[-dirty] (no NUM) - describe_out, rc = run_command( + describe_out, rc = runner( GITS, [ "describe", @@ -262,7 +271,7 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): "--always", "--long", "--match", - "%s*" % tag_prefix, + "%s%s" % (tag_prefix, TAG_PREFIX_REGEX), ], cwd=root, ) @@ -270,7 +279,7 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): if describe_out is None: raise NotThisMethod("'git describe' failed") describe_out = describe_out.strip() - full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root) + full_out, rc = runner(GITS, ["rev-parse", "HEAD"], cwd=root) if full_out is None: raise NotThisMethod("'git rev-parse' failed") full_out = full_out.strip() @@ -280,6 +289,40 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): pieces["short"] = full_out[:7] # maybe improved later pieces["error"] = None + branch_name, rc = runner( + GITS, ["rev-parse", "--abbrev-ref", "HEAD"], cwd=root + ) + # --abbrev-ref was added in git-1.6.3 + if rc != 0 or branch_name is None: + raise NotThisMethod("'git rev-parse --abbrev-ref' returned error") + branch_name = branch_name.strip() + + if branch_name == "HEAD": + # If we aren't exactly on a branch, pick a branch which represents + # the current commit. If all else fails, we are on a branchless + # commit. + branches, rc = runner(GITS, ["branch", "--contains"], cwd=root) + # --contains was added in git-1.5.4 + if rc != 0 or branches is None: + raise NotThisMethod("'git branch --contains' returned error") + branches = branches.split("\n") + + # Remove the first line if we're running detached + if "(" in branches[0]: + branches.pop(0) + + # Strip off the leading "* " from the list of branches. + branches = [branch[2:] for branch in branches] + if "master" in branches: + branch_name = "master" + elif not branches: + branch_name = None + else: + # Pick the first branch that is returned. Good or bad. + branch_name = branches[0] + + pieces["branch"] = branch_name + # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] # TAG might have hyphens. 
git_describe = describe_out @@ -296,7 +339,7 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): # TAG-NUM-gHEX mo = re.search(r"^(.+)-(\d+)-g([0-9a-f]+)$", git_describe) if not mo: - # unparseable. Maybe git-describe is misbehaving? + # unparsable. Maybe git-describe is misbehaving? pieces["error"] = ( "unable to parse git-describe output: '%s'" % describe_out ) @@ -324,15 +367,16 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): else: # HEX: no tags pieces["closest-tag"] = None - count_out, rc = run_command( - GITS, ["rev-list", "HEAD", "--count"], cwd=root - ) + count_out, rc = runner(GITS, ["rev-list", "HEAD", "--count"], cwd=root) pieces["distance"] = int(count_out) # total number of commits # commit date: see ISO-8601 comment in git_versions_from_keywords() - date = run_command(GITS, ["show", "-s", "--format=%ci", "HEAD"], cwd=root)[ + date = runner(GITS, ["show", "-s", "--format=%ci", "HEAD"], cwd=root)[ 0 ].strip() + # Use only the last line. Previous lines may contain GPG signature + # information. + date = date.splitlines()[-1] pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1) return pieces @@ -369,19 +413,71 @@ def render_pep440(pieces): return rendered -def render_pep440_pre(pieces): - """TAG[.post.devDISTANCE] -- No -dirty. +def render_pep440_branch(pieces): + """TAG[[.dev0]+DISTANCE.gHEX[.dirty]] . + + The ".dev0" means not master branch. Note that .dev0 sorts backwards + (a feature branch will appear "older" than the master branch). Exceptions: - 1: no tags. 0.post.devDISTANCE + 1: no tags. 0[.dev0]+untagged.DISTANCE.gHEX[.dirty] """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] + if pieces["distance"] or pieces["dirty"]: + if pieces["branch"] != "master": + rendered += ".dev0" + rendered += plus_or_dot(pieces) + rendered += "%d.g%s" % (pieces["distance"], pieces["short"]) + if pieces["dirty"]: + rendered += ".dirty" + else: + # exception #1 + rendered = "0" + if pieces["branch"] != "master": + rendered += ".dev0" + rendered += "+untagged.%d.g%s" % (pieces["distance"], pieces["short"]) + if pieces["dirty"]: + rendered += ".dirty" + return rendered + + +def pep440_split_post(ver): + """Split pep440 version string at the post-release segment. + + Returns the release segments before the post-release and the + post-release version number (or -1 if no post-release segment is present). + """ + vc = str.split(ver, ".post") + return vc[0], int(vc[1] or 0) if len(vc) == 2 else None + + +def render_pep440_pre(pieces): + """TAG[.postN.devDISTANCE] -- No -dirty. + + Exceptions: + 1: no tags. 0.post0.devDISTANCE + """ + if pieces["closest-tag"]: if pieces["distance"]: - rendered += ".post.dev%d" % pieces["distance"] + # update the post release segment + tag_version, post_version = pep440_split_post( + pieces["closest-tag"] + ) + rendered = tag_version + if post_version is not None: + rendered += ".post%d.dev%d" % ( + post_version + 1, + pieces["distance"], + ) + else: + rendered += ".post0.dev%d" % (pieces["distance"]) + else: + # no commits, use the tag as the version + rendered = pieces["closest-tag"] else: # exception #1 - rendered = "0.post.dev%d" % pieces["distance"] + rendered = "0.post0.dev%d" % pieces["distance"] return rendered @@ -412,12 +508,41 @@ def render_pep440_post(pieces): return rendered +def render_pep440_post_branch(pieces): + """TAG[.postDISTANCE[.dev0]+gHEX[.dirty]] . + + The ".dev0" means not master branch. + + Exceptions: + 1: no tags. 
0.postDISTANCE[.dev0]+gHEX[.dirty] + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"] or pieces["dirty"]: + rendered += ".post%d" % pieces["distance"] + if pieces["branch"] != "master": + rendered += ".dev0" + rendered += plus_or_dot(pieces) + rendered += "g%s" % pieces["short"] + if pieces["dirty"]: + rendered += ".dirty" + else: + # exception #1 + rendered = "0.post%d" % pieces["distance"] + if pieces["branch"] != "master": + rendered += ".dev0" + rendered += "+g%s" % pieces["short"] + if pieces["dirty"]: + rendered += ".dirty" + return rendered + + def render_pep440_old(pieces): """TAG[.postDISTANCE[.dev0]] . The ".dev0" means dirty. - Eexceptions: + Exceptions: 1: no tags. 0.postDISTANCE[.dev0] """ if pieces["closest-tag"]: @@ -490,10 +615,14 @@ def render(pieces, style): if style == "pep440": rendered = render_pep440(pieces) + elif style == "pep440-branch": + rendered = render_pep440_branch(pieces) elif style == "pep440-pre": rendered = render_pep440_pre(pieces) elif style == "pep440-post": rendered = render_pep440_post(pieces) + elif style == "pep440-post-branch": + rendered = render_pep440_post_branch(pieces) elif style == "pep440-old": rendered = render_pep440_old(pieces) elif style == "git-describe": @@ -534,7 +663,7 @@ def get_versions(): # versionfile_source is the relative path from the top of the source # tree (where the .git directory might live) to this file. Invert # this to find the root from __file__. - for i in cfg.versionfile_source.split("/"): + for _ in cfg.versionfile_source.split("/"): root = os.path.dirname(root) except NameError: return { diff --git a/src/ecdsa/curves.py b/src/ecdsa/curves.py index 9a103803..38e3a758 100644 --- a/src/ecdsa/curves.py +++ b/src/ecdsa/curves.py @@ -1,7 +1,9 @@ from __future__ import division -from . import der, ecdsa -from .util import orderlen +from six import PY2 +from . 
import der, ecdsa, ellipticcurve, eddsa +from .util import orderlen, number_to_string, string_to_number +from ._compat import normalise_bytes, bit_length # orderlen was defined in this module previously, so keep it in __all__, @@ -10,6 +12,10 @@ "UnknownCurveError", "orderlen", "Curve", + "SECP112r1", + "SECP112r2", + "SECP128r1", + "SECP160r1", "NIST192p", "NIST224p", "NIST256p", @@ -17,17 +23,33 @@ "NIST521p", "curves", "find_curve", + "curve_by_name", "SECP256k1", "BRAINPOOLP160r1", + "BRAINPOOLP160t1", "BRAINPOOLP192r1", + "BRAINPOOLP192t1", "BRAINPOOLP224r1", + "BRAINPOOLP224t1", "BRAINPOOLP256r1", + "BRAINPOOLP256t1", "BRAINPOOLP320r1", + "BRAINPOOLP320t1", "BRAINPOOLP384r1", + "BRAINPOOLP384t1", "BRAINPOOLP512r1", + "BRAINPOOLP512t1", + "PRIME_FIELD_OID", + "CHARACTERISTIC_TWO_FIELD_OID", + "Ed25519", + "Ed448", ] +PRIME_FIELD_OID = (1, 2, 840, 10045, 1, 1) +CHARACTERISTIC_TWO_FIELD_OID = (1, 2, 840, 10045, 1, 2) + + class UnknownCurveError(Exception): pass @@ -39,15 +61,262 @@ def __init__(self, name, curve, generator, oid, openssl_name=None): self.curve = curve self.generator = generator self.order = generator.order() - self.baselen = orderlen(self.order) - self.verifying_key_length = 2 * self.baselen + if isinstance(curve, ellipticcurve.CurveEdTw): + # EdDSA keys are special in that both private and public + # are the same size (as it's defined only with compressed points) + + # +1 for the sign bit and then round up + self.baselen = (bit_length(curve.p()) + 1 + 7) // 8 + self.verifying_key_length = self.baselen + else: + self.baselen = orderlen(self.order) + self.verifying_key_length = 2 * orderlen(curve.p()) self.signature_length = 2 * self.baselen self.oid = oid - self.encoded_oid = der.encode_oid(*oid) + if oid: + self.encoded_oid = der.encode_oid(*oid) + + def __eq__(self, other): + if isinstance(other, Curve): + return ( + self.curve == other.curve and self.generator == other.generator + ) + return NotImplemented + + def __ne__(self, other): + return not self == other def __repr__(self): return self.name + def to_der(self, encoding=None, point_encoding="uncompressed"): + """Serialise the curve parameters to binary string. + + :param str encoding: the format to save the curve parameters in. + Default is ``named_curve``, with fallback being the ``explicit`` + if the OID is not set for the curve. + :param str point_encoding: the point encoding of the generator when + explicit curve encoding is used. Ignored for ``named_curve`` + format. 
+ + :return: DER encoded ECParameters structure + :rtype: bytes + """ + if encoding is None: + if self.oid: + encoding = "named_curve" + else: + encoding = "explicit" + + if encoding not in ("named_curve", "explicit"): + raise ValueError( + "Only 'named_curve' and 'explicit' encodings supported" + ) + + if encoding == "named_curve": + if not self.oid: + raise UnknownCurveError( + "Can't encode curve using named_curve encoding without " + "associated curve OID" + ) + return der.encode_oid(*self.oid) + elif isinstance(self.curve, ellipticcurve.CurveEdTw): + assert encoding == "explicit" + raise UnknownCurveError( + "Twisted Edwards curves don't support explicit encoding" + ) + + # encode the ECParameters sequence + curve_p = self.curve.p() + version = der.encode_integer(1) + field_id = der.encode_sequence( + der.encode_oid(*PRIME_FIELD_OID), der.encode_integer(curve_p) + ) + curve = der.encode_sequence( + der.encode_octet_string( + number_to_string(self.curve.a() % curve_p, curve_p) + ), + der.encode_octet_string( + number_to_string(self.curve.b() % curve_p, curve_p) + ), + ) + base = der.encode_octet_string(self.generator.to_bytes(point_encoding)) + order = der.encode_integer(self.generator.order()) + seq_elements = [version, field_id, curve, base, order] + if self.curve.cofactor(): + cofactor = der.encode_integer(self.curve.cofactor()) + seq_elements.append(cofactor) + + return der.encode_sequence(*seq_elements) + + def to_pem(self, encoding=None, point_encoding="uncompressed"): + """ + Serialise the curve parameters to the :term:`PEM` format. + + :param str encoding: the format to save the curve parameters in. + Default is ``named_curve``, with fallback being the ``explicit`` + if the OID is not set for the curve. + :param str point_encoding: the point encoding of the generator when + explicit curve encoding is used. Ignored for ``named_curve`` + format. + + :return: PEM encoded ECParameters structure + :rtype: str + """ + return der.topem( + self.to_der(encoding, point_encoding), "EC PARAMETERS" + ) + + @staticmethod + def from_der(data, valid_encodings=None): + """Decode the curve parameters from DER file. 
+ + :param data: the binary string to decode the parameters from + :type data: :term:`bytes-like object` + :param valid_encodings: set of names of allowed encodings, by default + all (set by passing ``None``), supported ones are ``named_curve`` + and ``explicit`` + :type valid_encodings: :term:`set-like object` + """ + if not valid_encodings: + valid_encodings = set(("named_curve", "explicit")) + if not all(i in ["named_curve", "explicit"] for i in valid_encodings): + raise ValueError( + "Only named_curve and explicit encodings supported" + ) + data = normalise_bytes(data) + if not der.is_sequence(data): + if "named_curve" not in valid_encodings: + raise der.UnexpectedDER( + "named_curve curve parameters not allowed" + ) + oid, empty = der.remove_object(data) + if empty: + raise der.UnexpectedDER("Unexpected data after OID") + return find_curve(oid) + + if "explicit" not in valid_encodings: + raise der.UnexpectedDER("explicit curve parameters not allowed") + + seq, empty = der.remove_sequence(data) + if empty: + raise der.UnexpectedDER( + "Unexpected data after ECParameters structure" + ) + # decode the ECParameters sequence + version, rest = der.remove_integer(seq) + if version != 1: + raise der.UnexpectedDER("Unknown parameter encoding format") + field_id, rest = der.remove_sequence(rest) + curve, rest = der.remove_sequence(rest) + base_bytes, rest = der.remove_octet_string(rest) + order, rest = der.remove_integer(rest) + cofactor = None + if rest: + # the ASN.1 specification of ECParameters allows for future + # extensions of the sequence, so ignore the remaining bytes + cofactor, _ = der.remove_integer(rest) + + # decode the ECParameters.fieldID sequence + field_type, rest = der.remove_object(field_id) + if field_type == CHARACTERISTIC_TWO_FIELD_OID: + raise UnknownCurveError("Characteristic 2 curves unsupported") + if field_type != PRIME_FIELD_OID: + raise UnknownCurveError( + "Unknown field type: {0}".format(field_type) + ) + prime, empty = der.remove_integer(rest) + if empty: + raise der.UnexpectedDER( + "Unexpected data after ECParameters.fieldID.Prime-p element" + ) + + # decode the ECParameters.curve sequence + curve_a_bytes, rest = der.remove_octet_string(curve) + curve_b_bytes, rest = der.remove_octet_string(rest) + # seed can be defined here, but we don't parse it, so ignore `rest` + + curve_a = string_to_number(curve_a_bytes) + curve_b = string_to_number(curve_b_bytes) + + curve_fp = ellipticcurve.CurveFp(prime, curve_a, curve_b, cofactor) + + # decode the ECParameters.base point + + base = ellipticcurve.PointJacobi.from_bytes( + curve_fp, + base_bytes, + valid_encodings=("uncompressed", "compressed", "hybrid"), + order=order, + generator=True, + ) + tmp_curve = Curve("unknown", curve_fp, base, None) + + # if the curve matches one of the well-known ones, use the well-known + # one in preference, as it will have the OID and name associated + for i in curves: + if tmp_curve == i: + return i + return tmp_curve + + @classmethod + def from_pem(cls, string, valid_encodings=None): + """Decode the curve parameters from PEM file. 
+ + :param str string: the text string to decode the parameters from + :param valid_encodings: set of names of allowed encodings, by default + all (set by passing ``None``), supported ones are ``named_curve`` + and ``explicit`` + :type valid_encodings: :term:`set-like object` + """ + if not PY2 and isinstance(string, str): # pragma: no branch + string = string.encode() + + ec_param_index = string.find(b"-----BEGIN EC PARAMETERS-----") + if ec_param_index == -1: + raise der.UnexpectedDER("EC PARAMETERS PEM header not found") + + return cls.from_der( + der.unpem(string[ec_param_index:]), valid_encodings + ) + + +# the SEC curves +SECP112r1 = Curve( + "SECP112r1", + ecdsa.curve_112r1, + ecdsa.generator_112r1, + (1, 3, 132, 0, 6), + "secp112r1", +) + + +SECP112r2 = Curve( + "SECP112r2", + ecdsa.curve_112r2, + ecdsa.generator_112r2, + (1, 3, 132, 0, 7), + "secp112r2", +) + + +SECP128r1 = Curve( + "SECP128r1", + ecdsa.curve_128r1, + ecdsa.generator_128r1, + (1, 3, 132, 0, 28), + "secp128r1", +) + + +SECP160r1 = Curve( + "SECP160r1", + ecdsa.curve_160r1, + ecdsa.generator_160r1, + (1, 3, 132, 0, 8), + "secp160r1", +) + # the NIST curves NIST192p = Curve( @@ -113,6 +382,15 @@ def __repr__(self): ) +BRAINPOOLP160t1 = Curve( + "BRAINPOOLP160t1", + ecdsa.curve_brainpoolp160t1, + ecdsa.generator_brainpoolp160t1, + (1, 3, 36, 3, 3, 2, 8, 1, 1, 2), + "brainpoolP160t1", +) + + BRAINPOOLP192r1 = Curve( "BRAINPOOLP192r1", ecdsa.curve_brainpoolp192r1, @@ -122,6 +400,15 @@ def __repr__(self): ) +BRAINPOOLP192t1 = Curve( + "BRAINPOOLP192t1", + ecdsa.curve_brainpoolp192t1, + ecdsa.generator_brainpoolp192t1, + (1, 3, 36, 3, 3, 2, 8, 1, 1, 4), + "brainpoolP192t1", +) + + BRAINPOOLP224r1 = Curve( "BRAINPOOLP224r1", ecdsa.curve_brainpoolp224r1, @@ -131,6 +418,15 @@ def __repr__(self): ) +BRAINPOOLP224t1 = Curve( + "BRAINPOOLP224t1", + ecdsa.curve_brainpoolp224t1, + ecdsa.generator_brainpoolp224t1, + (1, 3, 36, 3, 3, 2, 8, 1, 1, 6), + "brainpoolP224t1", +) + + BRAINPOOLP256r1 = Curve( "BRAINPOOLP256r1", ecdsa.curve_brainpoolp256r1, @@ -140,6 +436,15 @@ def __repr__(self): ) +BRAINPOOLP256t1 = Curve( + "BRAINPOOLP256t1", + ecdsa.curve_brainpoolp256t1, + ecdsa.generator_brainpoolp256t1, + (1, 3, 36, 3, 3, 2, 8, 1, 1, 8), + "brainpoolP256t1", +) + + BRAINPOOLP320r1 = Curve( "BRAINPOOLP320r1", ecdsa.curve_brainpoolp320r1, @@ -149,6 +454,15 @@ def __repr__(self): ) +BRAINPOOLP320t1 = Curve( + "BRAINPOOLP320t1", + ecdsa.curve_brainpoolp320t1, + ecdsa.generator_brainpoolp320t1, + (1, 3, 36, 3, 3, 2, 8, 1, 1, 10), + "brainpoolP320t1", +) + + BRAINPOOLP384r1 = Curve( "BRAINPOOLP384r1", ecdsa.curve_brainpoolp384r1, @@ -158,6 +472,15 @@ def __repr__(self): ) +BRAINPOOLP384t1 = Curve( + "BRAINPOOLP384t1", + ecdsa.curve_brainpoolp384t1, + ecdsa.generator_brainpoolp384t1, + (1, 3, 36, 3, 3, 2, 8, 1, 1, 12), + "brainpoolP384t1", +) + + BRAINPOOLP512r1 = Curve( "BRAINPOOLP512r1", ecdsa.curve_brainpoolp512r1, @@ -167,6 +490,32 @@ def __repr__(self): ) +BRAINPOOLP512t1 = Curve( + "BRAINPOOLP512t1", + ecdsa.curve_brainpoolp512t1, + ecdsa.generator_brainpoolp512t1, + (1, 3, 36, 3, 3, 2, 8, 1, 1, 14), + "brainpoolP512t1", +) + + +Ed25519 = Curve( + "Ed25519", + eddsa.curve_ed25519, + eddsa.generator_ed25519, + (1, 3, 101, 112), +) + + +Ed448 = Curve( + "Ed448", + eddsa.curve_ed448, + eddsa.generator_ed448, + (1, 3, 101, 113), +) + + +# no order in particular, but keep previously added curves first curves = [ NIST192p, NIST224p, @@ -181,10 +530,33 @@ def __repr__(self): BRAINPOOLP320r1, BRAINPOOLP384r1, BRAINPOOLP512r1, + SECP112r1, + 
SECP112r2, + SECP128r1, + SECP160r1, + Ed25519, + Ed448, + BRAINPOOLP160t1, + BRAINPOOLP192t1, + BRAINPOOLP224t1, + BRAINPOOLP256t1, + BRAINPOOLP320t1, + BRAINPOOLP384t1, + BRAINPOOLP512t1, ] def find_curve(oid_curve): + """Select a curve based on its OID + + :param tuple[int,...] oid_curve: ASN.1 Object Identifier of the + curve to return, like ``(1, 2, 840, 10045, 3, 1, 7)`` for ``NIST256p``. + + :raises UnknownCurveError: When the oid doesn't match any of the supported + curves + + :rtype: ~ecdsa.curves.Curve + """ for c in curves: if c.oid == oid_curve: return c @@ -192,3 +564,27 @@ def find_curve(oid_curve): "I don't know about the curve with oid %s." "I only know about these: %s" % (oid_curve, [c.name for c in curves]) ) + + +def curve_by_name(name): + """Select a curve based on its name. + + Returns a :py:class:`~ecdsa.curves.Curve` object with a ``name`` name. + Note that ``name`` is case-sensitve. + + :param str name: Name of the curve to return, like ``NIST256p`` or + ``prime256v1`` + + :raises UnknownCurveError: When the name doesn't match any of the supported + curves + + :rtype: ~ecdsa.curves.Curve + """ + for c in curves: + if name == c.name or (c.openssl_name and name == c.openssl_name): + return c + raise UnknownCurveError( + "Curve with name {0!r} unknown, only curves supported: {1}".format( + name, [c.name for c in curves] + ) + ) diff --git a/src/ecdsa/der.py b/src/ecdsa/der.py index 8c1de9ba..7a06b681 100644 --- a/src/ecdsa/der.py +++ b/src/ecdsa/der.py @@ -4,8 +4,8 @@ import base64 import warnings from itertools import chain -from six import int2byte, b, text_type -from ._compat import str_idx_as_int +from six import int2byte, text_type +from ._compat import compat26_str, str_idx_as_int class UnexpectedDER(Exception): @@ -16,20 +16,46 @@ def encode_constructed(tag, value): return int2byte(0xA0 + tag) + encode_length(len(value)) + value +def encode_implicit(tag, value, cls="context-specific"): + """ + Encode and IMPLICIT value using :term:`DER`. + + :param int tag: the tag value to encode, must be between 0 an 31 inclusive + :param bytes value: the data to encode + :param str cls: the class of the tag to encode: "application", + "context-specific", or "private" + :rtype: bytes + """ + if cls not in ("application", "context-specific", "private"): + raise ValueError("invalid tag class") + if tag > 31: + raise ValueError("Long tags not supported") + + if cls == "application": + tag_class = 0b01000000 + elif cls == "context-specific": + tag_class = 0b10000000 + else: + assert cls == "private" + tag_class = 0b11000000 + + return int2byte(tag_class + tag) + encode_length(len(value)) + value + + def encode_integer(r): assert r >= 0 # can't support negative numbers yet h = ("%x" % r).encode() if len(h) % 2: - h = b("0") + h + h = b"0" + h s = binascii.unhexlify(h) num = str_idx_as_int(s, 0) if num <= 0x7F: - return b("\x02") + encode_length(len(s)) + s + return b"\x02" + encode_length(len(s)) + s else: # DER integers are two's complement, so if the first byte is # 0x80-0xff then we need an extra 0x00 byte to prevent it from # looking negative. 
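+        # For example, encode_integer(127) yields b"\x02\x01\x7f" (a single
+        # content byte), while encode_integer(128) yields b"\x02\x02\x00\x80"
+        # (a leading zero byte keeps the value from being read as negative).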
- return b("\x02") + encode_length(len(s) + 1) + b("\x00") + s + return b"\x02" + encode_length(len(s) + 1) + b"\x00" + s # sentry object to check if an argument was specified (used to detect @@ -87,15 +113,15 @@ def encode_bitstring(s, unused=_sentry): if not s: raise ValueError("unused is non-zero but s is empty") last = str_idx_as_int(s, -1) - if last & (2 ** unused - 1): + if last & (2**unused - 1): raise ValueError("unused bits must be zeros in DER") encoded_unused = int2byte(unused) len_extra = 1 - return b("\x03") + encode_length(len(s) + len_extra) + encoded_unused + s + return b"\x03" + encode_length(len(s) + len_extra) + encoded_unused + s def encode_octet_string(s): - return b("\x04") + encode_length(len(s)) + s + return b"\x04" + encode_length(len(s)) + s def encode_oid(first, second, *pieces): @@ -111,7 +137,7 @@ def encode_oid(first, second, *pieces): def encode_sequence(*encoded_pieces): total_len = sum([len(p) for p in encoded_pieces]) - return b("\x30") + encode_length(total_len) + b("").join(encoded_pieces) + return b"\x30" + encode_length(total_len) + b"".join(encoded_pieces) def encode_number(n): @@ -122,7 +148,7 @@ def encode_number(n): if not b128_digits: b128_digits.append(0) b128_digits[-1] &= 0x7F - return b("").join([int2byte(d) for d in b128_digits]) + return b"".join([int2byte(d) for d in b128_digits]) def is_sequence(string): @@ -142,6 +168,49 @@ def remove_constructed(string): return tag, body, rest +def remove_implicit(string, exp_class="context-specific"): + """ + Removes an IMPLICIT tagged value from ``string`` following :term:`DER`. + + :param bytes string: a byte string that can have one or more + DER elements. + :param str exp_class: the expected tag class of the implicitly + encoded value. Possible values are: "context-specific", "application", + and "private". 
+ :return: a tuple with first value being the tag without indicator bits, + second being the raw bytes of the value and the third one being + remaining bytes (or an empty string if there are none) + :rtype: tuple(int,bytes,bytes) + """ + if exp_class not in ("context-specific", "application", "private"): + raise ValueError("invalid `exp_class` value") + if exp_class == "application": + tag_class = 0b01000000 + elif exp_class == "context-specific": + tag_class = 0b10000000 + else: + assert exp_class == "private" + tag_class = 0b11000000 + tag_mask = 0b11000000 + + s0 = str_idx_as_int(string, 0) + + if (s0 & tag_mask) != tag_class: + raise UnexpectedDER( + "wanted class {0}, got 0x{1:02x} tag".format(exp_class, s0) + ) + if s0 & 0b00100000 != 0: + raise UnexpectedDER( + "wanted type primitive, got 0x{0:02x} tag".format(s0) + ) + + tag = s0 & 0x1F + length, llen = read_length(string[1:]) + body = string[1 + llen : 1 + llen + length] + rest = string[1 + llen + length :] + return tag, body, rest + + def remove_sequence(string): if not string: raise UnexpectedDER("Empty string does not encode a sequence") @@ -254,7 +323,7 @@ def encode_length(l): return int2byte(l) s = ("%x" % l).encode() if len(s) % 2: - s = b("0") + s + s = b"0" + s s = binascii.unhexlify(s) llen = len(s) return int2byte(0x80 | llen) + s @@ -348,7 +417,7 @@ def remove_bitstring(string, expect_unused=_sentry): raise UnexpectedDER("Invalid encoding of empty bit string") last = str_idx_as_int(body, -1) # verify that all the unused bits are set to zero (DER requirement) - if last & (2 ** unused - 1): + if last & (2**unused - 1): raise UnexpectedDER("Non zero padding bits in bit string") if expect_unused is None: body = (body, unused) @@ -386,24 +455,24 @@ def remove_bitstring(string, expect_unused=_sentry): def unpem(pem): - if isinstance(pem, text_type): + if isinstance(pem, text_type): # pragma: no branch pem = pem.encode() - d = b("").join( + d = b"".join( [ l.strip() - for l in pem.split(b("\n")) - if l and not l.startswith(b("-----")) + for l in pem.split(b"\n") + if l and not l.startswith(b"-----") ] ) return base64.b64decode(d) def topem(der, name): - b64 = base64.b64encode(der) + b64 = base64.b64encode(compat26_str(der)) lines = [("-----BEGIN %s-----\n" % name).encode()] lines.extend( - [b64[start : start + 64] + b("\n") for start in range(0, len(b64), 64)] + [b64[start : start + 76] + b"\n" for start in range(0, len(b64), 76)] ) lines.append(("-----END %s-----\n" % name).encode()) - return b("").join(lines) + return b"".join(lines) diff --git a/src/ecdsa/ecdh.py b/src/ecdsa/ecdh.py index 9173279f..7f697d9a 100644 --- a/src/ecdsa/ecdh.py +++ b/src/ecdsa/ecdh.py @@ -116,7 +116,7 @@ def generate_private_key(self): :raises NoCurveError: Curve must be set before key generation. :return: public (verifying) key from this private key. - :rtype: VerifyingKey object + :rtype: VerifyingKey """ if not self.curve: raise NoCurveError("Curve must be set prior to key generation.") @@ -135,7 +135,7 @@ def load_private_key(self, private_key): :raises InvalidCurveError: private_key curve not the same as self.curve :return: public (verifying) key from this private key. - :rtype: VerifyingKey object + :rtype: VerifyingKey """ if not self.curve: self.curve = private_key.curve @@ -158,7 +158,7 @@ def load_private_key_bytes(self, private_key): :raises NoCurveError: Curve must be set before loading. :return: public (verifying) key from this private key. 
- :rtype: VerifyingKey object + :rtype: VerifyingKey """ if not self.curve: raise NoCurveError("Curve must be set prior to key load.") @@ -183,7 +183,7 @@ def load_private_key_der(self, private_key_der): :raises InvalidCurveError: private_key curve not the same as self.curve :return: public (verifying) key from this private key. - :rtype: VerifyingKey object + :rtype: VerifyingKey """ return self.load_private_key(SigningKey.from_der(private_key_der)) @@ -204,7 +204,7 @@ def load_private_key_pem(self, private_key_pem): :raises InvalidCurveError: private_key curve not the same as self.curve :return: public (verifying) key from this private key. - :rtype: VerifyingKey object + :rtype: VerifyingKey """ return self.load_private_key(SigningKey.from_pem(private_key_pem)) @@ -215,8 +215,8 @@ def get_public_key(self): Needs to be sent to the remote party. :return: public (verifying) key from local private key. - :rtype: VerifyingKey object - """ + :rtype: VerifyingKey + """ return self.private_key.get_verifying_key() def load_received_public_key(self, public_key): @@ -237,7 +237,9 @@ def load_received_public_key(self, public_key): raise InvalidCurveError("Curve mismatch.") self.public_key = public_key - def load_received_public_key_bytes(self, public_key_str): + def load_received_public_key_bytes( + self, public_key_str, valid_encodings=None + ): """ Load public key from byte string. @@ -247,9 +249,16 @@ def load_received_public_key_bytes(self, public_key_str): :param public_key_str: public key in bytes string format :type public_key_str: :term:`bytes-like object` + :param valid_encodings: list of acceptable point encoding formats, + supported ones are: :term:`uncompressed`, :term:`compressed`, + :term:`hybrid`, and :term:`raw encoding` (specified with ``raw`` + name). All formats by default (specified with ``None``). + :type valid_encodings: :term:`set-like object` """ return self.load_received_public_key( - VerifyingKey.from_string(public_key_str, self.curve) + VerifyingKey.from_string( + public_key_str, self.curve, valid_encodings + ) ) def load_received_public_key_der(self, public_key_der): @@ -301,10 +310,10 @@ def generate_sharedsecret_bytes(self): :raises NoKeyError: public_key or private_key is not set :return: shared secret - :rtype: byte string + :rtype: bytes """ return number_to_string( - self.generate_sharedsecret(), self.private_key.curve.order + self.generate_sharedsecret(), self.private_key.curve.curve.p() ) def generate_sharedsecret(self): @@ -314,9 +323,9 @@ def generate_sharedsecret(self): The objects needs to have both private key and received public key before generation is allowed. - It's the same for local and remote party. - shared secret(local private key, remote public key ) == - shared secret (local public key, remote private key) + It's the same for local and remote party, + shared secret(local private key, remote public key) == + shared secret(local public key, remote private key) :raises InvalidCurveError: public_key curve not the same as self.curve :raises NoKeyError: public_key or private_key is not set diff --git a/src/ecdsa/ecdsa.py b/src/ecdsa/ecdsa.py index d785a457..f7109659 100644 --- a/src/ecdsa/ecdsa.py +++ b/src/ecdsa/ecdsa.py @@ -1,59 +1,71 @@ #! /usr/bin/env python """ -Implementation of Elliptic-Curve Digital Signatures. +Low level implementation of Elliptic-Curve Digital Signatures. + +.. note :: + You're most likely looking for the :py:class:`~ecdsa.keys` module. 
+ This is a low-level implementation of the ECDSA that operates on + integers, not byte strings. + +NOTE: This a low level implementation of ECDSA, for normal applications +you should be looking at the keys.py module. Classes and methods for elliptic-curve signatures: private keys, public keys, signatures, -NIST prime-modulus curves with modulus lengths of -192, 224, 256, 384, and 521 bits. +and definitions of prime-modulus curves. Example: - # (In real-life applications, you would probably want to - # protect against defects in SystemRandom.) - from random import SystemRandom - randrange = SystemRandom().randrange +.. code-block:: python - # Generate a public/private key pair using the NIST Curve P-192: + # (In real-life applications, you would probably want to + # protect against defects in SystemRandom.) + from random import SystemRandom + randrange = SystemRandom().randrange - g = generator_192 - n = g.order() - secret = randrange( 1, n ) - pubkey = Public_key( g, g * secret ) - privkey = Private_key( pubkey, secret ) + # Generate a public/private key pair using the NIST Curve P-192: - # Signing a hash value: + g = generator_192 + n = g.order() + secret = randrange( 1, n ) + pubkey = Public_key( g, g * secret ) + privkey = Private_key( pubkey, secret ) - hash = randrange( 1, n ) - signature = privkey.sign( hash, randrange( 1, n ) ) + # Signing a hash value: - # Verifying a signature for a hash value: + hash = randrange( 1, n ) + signature = privkey.sign( hash, randrange( 1, n ) ) - if pubkey.verifies( hash, signature ): - print_("Demo verification succeeded.") - else: - print_("*** Demo verification failed.") + # Verifying a signature for a hash value: - # Verification fails if the hash value is modified: + if pubkey.verifies( hash, signature ): + print("Demo verification succeeded.") + else: + print("*** Demo verification failed.") - if pubkey.verifies( hash-1, signature ): - print_("**** Demo verification failed to reject tampered hash.") - else: - print_("Demo verification correctly rejected tampered hash.") + # Verification fails if the hash value is modified: -Version of 2009.05.16. + if pubkey.verifies( hash-1, signature ): + print("**** Demo verification failed to reject tampered hash.") + else: + print("Demo verification correctly rejected tampered hash.") Revision history: 2005.12.31 - Initial version. + 2008.11.25 - Substantial revisions introducing new classes. + 2009.05.16 - Warn against using random.randrange in real applications. + 2009.05.17 - Use random.SystemRandom by default. -Written in 2005 by Peter Pearson and placed in the public domain. +Originally written in 2005 by Peter Pearson and placed in the public domain, +modified as part of the python-ecdsa package. """ -from six import int2byte, b +import warnings +from six import int2byte from . import ellipticcurve from . import numbertheory from .util import bit_length @@ -69,16 +81,26 @@ class InvalidPointError(RuntimeError): class Signature(object): - """ECDSA signature.""" + """ + ECDSA signature. 
+ + :ivar int r: the ``r`` element of the ECDSA signature + :ivar int s: the ``s`` element of the ECDSA signature + """ def __init__(self, r, s): self.r = r self.s = s def recover_public_keys(self, hash, generator): - """Returns two public keys for which the signature is valid - hash is signed hash - generator is the used generator of the signature + """ + Returns two public keys for which the signature is valid + + :param int hash: signed hash + :param AbstractPoint generator: is the generator used in creation + of the signature + :rtype: tuple(Public_key, Public_key) + :return: a pair of public keys that can validate the signature """ curve = generator.curve() n = generator.order() @@ -118,7 +140,7 @@ def __init__(self, generator, point, verify=True): :param bool verify: if True check if point is valid point on curve :raises InvalidPointError: if the point parameters are invalid or - point does not lie on the curve + point does not lay on the curve """ self.curve = generator.curve() @@ -131,7 +153,7 @@ def __init__(self, generator, point, verify=True): "The public point has x or y out of range." ) if verify and not self.curve.contains_point(point.x(), point.y()): - raise InvalidPointError("Point does not lie on the curve") + raise InvalidPointError("Point does not lay on the curve") if not n: raise InvalidPointError("Generator point must have order.") # for curve parameters with base point with cofactor 1, all points @@ -145,11 +167,20 @@ def __init__(self, generator, point, verify=True): raise InvalidPointError("Generator point order is bad.") def __eq__(self, other): + """Return True if the keys are identical, False otherwise. + + Note: for comparison, only placement on the same curve and point + equality is considered, use of the same generator point is not + considered. + """ if isinstance(other, Public_key): - """Return True if the points are identical, False otherwise.""" return self.curve == other.curve and self.point == other.point return NotImplemented + def __ne__(self, other): + """Return False if the keys are identical, True otherwise.""" + return not self == other + def verifies(self, hash, signature): """Verify that signature is a valid signature of hash. Return True if the signature is valid. @@ -188,14 +219,18 @@ def __init__(self, public_key, secret_multiplier): self.secret_multiplier = secret_multiplier def __eq__(self, other): + """Return True if the points are identical, False otherwise.""" if isinstance(other, Private_key): - """Return True if the points are identical, False otherwise.""" return ( self.public_key == other.public_key and self.secret_multiplier == other.secret_multiplier ) return NotImplemented + def __ne__(self, other): + """Return False if the points are identical, True otherwise.""" + return not self == other + def sign(self, hash, random_k): """Return a signature for the provided hash, using the provided random nonce. It is absolutely vital that random_k be an unpredictable @@ -235,11 +270,17 @@ def sign(self, hash, random_k): return Signature(r, s) -def int_to_string(x): +def int_to_string(x): # pragma: no cover """Convert integer x into a string of bytes, as per X9.62.""" + # deprecated in 0.19 + warnings.warn( + "Function is unused in library code. 
If you use this code, " + "change to util.number_to_string.", + DeprecationWarning, + ) assert x >= 0 if x == 0: - return b("\0") + return b"\0" result = [] while x: ordinal = x & 0xFF @@ -247,11 +288,17 @@ def int_to_string(x): x >>= 8 result.reverse() - return b("").join(result) + return b"".join(result) -def string_to_int(s): +def string_to_int(s): # pragma: no cover """Convert a string of bytes into an integer, as per X9.62.""" + # deprecated in 0.19 + warnings.warn( + "Function is unused in library code. If you use this code, " + "change to util.string_to_number.", + DeprecationWarning, + ) result = 0 for c in s: if not isinstance(c, int): @@ -260,9 +307,16 @@ def string_to_int(s): return result -def digest_integer(m): +def digest_integer(m): # pragma: no cover """Convert an integer into a string of bytes, compute - its SHA-1 hash, and convert the result to an integer.""" + its SHA-1 hash, and convert the result to an integer.""" + # deprecated in 0.19 + warnings.warn( + "Function is unused in library code. If you use this code, " + "change to a one-liner with util.number_to_string and " + "util.string_to_number methods.", + DeprecationWarning, + ) # # I don't expect this function to be used much. I wrote # it in order to be able to duplicate the examples @@ -294,6 +348,77 @@ def point_is_valid(generator, x, y): return True +# secp112r1 curve +_p = int(remove_whitespace("DB7C 2ABF62E3 5E668076 BEAD208B"), 16) +# s = 00F50B02 8E4D696E 67687561 51752904 72783FB1 +_a = int(remove_whitespace("DB7C 2ABF62E3 5E668076 BEAD2088"), 16) +_b = int(remove_whitespace("659E F8BA0439 16EEDE89 11702B22"), 16) +_Gx = int(remove_whitespace("09487239 995A5EE7 6B55F9C2 F098"), 16) +_Gy = int(remove_whitespace("A89C E5AF8724 C0A23E0E 0FF77500"), 16) +_r = int(remove_whitespace("DB7C 2ABF62E3 5E7628DF AC6561C5"), 16) +_h = 1 +curve_112r1 = ellipticcurve.CurveFp(_p, _a, _b, _h) +generator_112r1 = ellipticcurve.PointJacobi( + curve_112r1, _Gx, _Gy, 1, _r, generator=True +) + + +# secp112r2 curve +_p = int(remove_whitespace("DB7C 2ABF62E3 5E668076 BEAD208B"), 16) +# s = 022757A1 114D69E 67687561 51755316 C05E0BD4 +_a = int(remove_whitespace("6127 C24C05F3 8A0AAAF6 5C0EF02C"), 16) +_b = int(remove_whitespace("51DE F1815DB5 ED74FCC3 4C85D709"), 16) +_Gx = int(remove_whitespace("4BA30AB5 E892B4E1 649DD092 8643"), 16) +_Gy = int(remove_whitespace("ADCD 46F5882E 3747DEF3 6E956E97"), 16) +_r = int(remove_whitespace("36DF 0AAFD8B8 D7597CA1 0520D04B"), 16) +_h = 4 +curve_112r2 = ellipticcurve.CurveFp(_p, _a, _b, _h) +generator_112r2 = ellipticcurve.PointJacobi( + curve_112r2, _Gx, _Gy, 1, _r, generator=True +) + + +# secp128r1 curve +_p = int(remove_whitespace("FFFFFFFD FFFFFFFF FFFFFFFF FFFFFFFF"), 16) +# S = 000E0D4D 69E6768 75615175 0CC03A44 73D03679 +# a and b are mod p, so a is equal to p-3, or simply -3 +# _a = -3 +_b = int(remove_whitespace("E87579C1 1079F43D D824993C 2CEE5ED3"), 16) +_Gx = int(remove_whitespace("161FF752 8B899B2D 0C28607C A52C5B86"), 16) +_Gy = int(remove_whitespace("CF5AC839 5BAFEB13 C02DA292 DDED7A83"), 16) +_r = int(remove_whitespace("FFFFFFFE 00000000 75A30D1B 9038A115"), 16) +_h = 1 +curve_128r1 = ellipticcurve.CurveFp(_p, -3, _b, _h) +generator_128r1 = ellipticcurve.PointJacobi( + curve_128r1, _Gx, _Gy, 1, _r, generator=True +) + + +# secp160r1 +_p = int(remove_whitespace("FFFFFFFF FFFFFFFF FFFFFFFF FFFFFFFF 7FFFFFFF"), 16) +# S = 1053CDE4 2C14D696 E6768756 1517533B F3F83345 +# a and b are mod p, so a is equal to p-3, or simply -3 +# _a = -3 +_b = int(remove_whitespace("1C97BEFC 
54BD7A8B 65ACF89F 81D4D4AD C565FA45"), 16) +_Gx = int( + remove_whitespace("4A96B568 8EF57328 46646989 68C38BB9 13CBFC82"), + 16, +) +_Gy = int( + remove_whitespace("23A62855 3168947D 59DCC912 04235137 7AC5FB32"), + 16, +) +_r = int( + remove_whitespace("01 00000000 00000000 0001F4C8 F927AED3 CA752257"), + 16, +) +_h = 1 +curve_160r1 = ellipticcurve.CurveFp(_p, -3, _b, _h) +generator_160r1 = ellipticcurve.PointJacobi( + curve_160r1, _Gx, _Gy, 1, _r, generator=True +) + + # NIST Curve P-192: _p = 6277101735386680763835789423207666416083908700390324961279 _r = 6277101735386680763835789423176059013767194773182842284081 @@ -549,6 +674,18 @@ def point_is_valid(generator, x, y): curve_brainpoolp160r1, _Gx, _Gy, 1, _q, generator=True ) +# Brainpool P-160-t1 +_a = 0xE95E4A5F737059DC60DFC7AD95B3D8139515620C +_b = 0x7A556B6DAE535B7B51ED2C4D7DAA7A0B5C55F380 +# _z = 0x24DBFF5DEC9B986BBFE5295A29BFBAE45E0F5D0B +_Gx = 0xB199B13B9B34EFC1397E64BAEB05ACC265FF2378 +_Gy = 0xADD6718B7C7C1961F0991B842443772152C9E0AD +_q = 0xE95E4A5F737059DC60DF5991D45029409E60FC09 +curve_brainpoolp160t1 = ellipticcurve.CurveFp(_p, _a, _b, 1) +generator_brainpoolp160t1 = ellipticcurve.PointJacobi( + curve_brainpoolp160t1, _Gx, _Gy, 1, _q, generator=True +) + # Brainpool P-192-r1 _a = 0x6A91174076B1E0E19C39C031FE8685C1CAE040E5C69A28EF _b = 0x469A28EF7C28CCA3DC721D044F4496BCCA7EF4146FBF25C9 @@ -562,6 +699,19 @@ def point_is_valid(generator, x, y): curve_brainpoolp192r1, _Gx, _Gy, 1, _q, generator=True ) +# Brainpool P-192-t1 +_a = 0xC302F41D932A36CDA7A3463093D18DB78FCE476DE1A86294 +_b = 0x13D56FFAEC78681E68F9DEB43B35BEC2FB68542E27897B79 +# _z = 0x1B6F5CC8DB4DC7AF19458A9CB80DC2295E5EB9C3732104CB +_Gx = 0x3AE9E58C82F63C30282E1FE7BBF43FA72C446AF6F4618129 +_Gy = 0x097E2C5667C2223A902AB5CA449D0084B7E5B3DE7CCC01C9 +_q = 0xC302F41D932A36CDA7A3462F9E9E916B5BE8F1029AC4ACC1 + +curve_brainpoolp192t1 = ellipticcurve.CurveFp(_p, _a, _b, 1) +generator_brainpoolp192t1 = ellipticcurve.PointJacobi( + curve_brainpoolp192t1, _Gx, _Gy, 1, _q, generator=True +) + # Brainpool P-224-r1 _a = 0x68A5E62CA9CE6C1C299803A6C1530B514E182AD8B0042A59CAD29F43 _b = 0x2580F63CCFE44138870713B1A92369E33E2135D266DBB372386C400B @@ -575,6 +725,19 @@ def point_is_valid(generator, x, y): curve_brainpoolp224r1, _Gx, _Gy, 1, _q, generator=True ) +# Brainpool P-224-t1 +_a = 0xD7C134AA264366862A18302575D1D787B09F075797DA89F57EC8C0FC +_b = 0x4B337D934104CD7BEF271BF60CED1ED20DA14C08B3BB64F18A60888D +# _z = 0x2DF271E14427A346910CF7A2E6CFA7B3F484E5C2CCE1C8B730E28B3F +_Gx = 0x6AB1E344CE25FF3896424E7FFE14762ECB49F8928AC0C76029B4D580 +_Gy = 0x0374E9F5143E568CD23F3F4D7C0D4B1E41C8CC0D1C6ABD5F1A46DB4C +_q = 0xD7C134AA264366862A18302575D0FB98D116BC4B6DDEBCA3A5A7939F + +curve_brainpoolp224t1 = ellipticcurve.CurveFp(_p, _a, _b, 1) +generator_brainpoolp224t1 = ellipticcurve.PointJacobi( + curve_brainpoolp224t1, _Gx, _Gy, 1, _q, generator=True +) + # Brainpool P-256-r1 _a = 0x7D5A0975FC2C3057EEF67530417AFFE7FB8055C126DC5C6CE94A4B44F330B5D9 _b = 0x26DC5C6CE94A4B44F330B5D9BBD77CBF958416295CF7E1CE6BCCDC18FF8C07B6 @@ -588,6 +751,19 @@ def point_is_valid(generator, x, y): curve_brainpoolp256r1, _Gx, _Gy, 1, _q, generator=True ) +# Brainpool P-256-t1 +_a = 0xA9FB57DBA1EEA9BC3E660A909D838D726E3BF623D52620282013481D1F6E5374 +_b = 0x662C61C430D84EA4FE66A7733D0B76B7BF93EBC4AF2F49256AE58101FEE92B04 +# _z = 0x3E2D4BD9597B58639AE7AA669CAB9837CF5CF20A2C852D10F655668DFC150EF0 +_Gx = 0xA3E8EB3CC1CFE7B7732213B23A656149AFA142C47AAFBC2B79A191562E1305F4 +_Gy = 
0x2D996C823439C56D7F7B22E14644417E69BCB6DE39D027001DABE8F35B25C9BE +_q = 0xA9FB57DBA1EEA9BC3E660A909D838D718C397AA3B561A6F7901E0E82974856A7 + +curve_brainpoolp256t1 = ellipticcurve.CurveFp(_p, _a, _b, 1) +generator_brainpoolp256t1 = ellipticcurve.PointJacobi( + curve_brainpoolp256t1, _Gx, _Gy, 1, _q, generator=True +) + # Brainpool P-320-r1 _a = int( remove_whitespace( @@ -643,6 +819,61 @@ def point_is_valid(generator, x, y): curve_brainpoolp320r1, _Gx, _Gy, 1, _q, generator=True ) +# Brainpool P-320-t1 +_a = int( + remove_whitespace( + """ + D35E472036BC4FB7E13C785ED201E065F98FCFA6F6F40DEF4F92B9EC7893EC + 28FCD412B1F1B32E24""" + ), + 16, +) +_b = int( + remove_whitespace( + """ + A7F561E038EB1ED560B3D147DB782013064C19F27ED27C6780AAF77FB8A547 + CEB5B4FEF422340353""" + ), + 16, +) +# _z = int( +# remove_whitespace( +# """ +# 15F75CAF668077F7E85B42EB01F0A81FF56ECD6191D55CB82B7D861458A18F +# EFC3E5AB7496F3C7B1""" +# ), +# 16, +# ) +_Gx = int( + remove_whitespace( + """ + 925BE9FB01AFC6FB4D3E7D4990010F813408AB106C4F09CB7EE07868CC136F + FF3357F624A21BED52""" + ), + 16, +) +_Gy = int( + remove_whitespace( + """ + 63BA3A7A27483EBF6671DBEF7ABB30EBEE084E58A0B077AD42A5A0989D1EE7 + 1B1B9BC0455FB0D2C3""" + ), + 16, +) +_q = int( + remove_whitespace( + """ + D35E472036BC4FB7E13C785ED201E065F98FCFA5B68F12A32D482EC7EE8658 + E98691555B44C59311""" + ), + 16, +) + +curve_brainpoolp320t1 = ellipticcurve.CurveFp(_p, _a, _b, 1) +generator_brainpoolp320t1 = ellipticcurve.PointJacobi( + curve_brainpoolp320t1, _Gx, _Gy, 1, _q, generator=True +) + # Brainpool P-384-r1 _a = int( remove_whitespace( @@ -698,6 +929,60 @@ def point_is_valid(generator, x, y): curve_brainpoolp384r1, _Gx, _Gy, 1, _q, generator=True ) +_a = int( + remove_whitespace( + """ + 8CB91E82A3386D280F5D6F7E50E641DF152F7109ED5456B412B1DA197FB711 + 23ACD3A729901D1A71874700133107EC50""" + ), + 16, +) +_b = int( + remove_whitespace( + """ + 7F519EADA7BDA81BD826DBA647910F8C4B9346ED8CCDC64E4B1ABD11756DCE + 1D2074AA263B88805CED70355A33B471EE""" + ), + 16, +) +# _z = int( +# remove_whitespace( +# """ +# 41DFE8DD399331F7166A66076734A89CD0D2BCDB7D068E44E1F378F41ECBAE +# 97D2D63DBC87BCCDDCCC5DA39E8589291C""" +# ), +# 16, +# ) +_Gx = int( + remove_whitespace( + """ + 18DE98B02DB9A306F2AFCD7235F72A819B80AB12EBD653172476FECD462AAB + FFC4FF191B946A5F54D8D0AA2F418808CC""" + ), + 16, +) +_Gy = int( + remove_whitespace( + """ + 25AB056962D30651A114AFD2755AD336747F93475B7A1FCA3B88F2B6A208CC + FE469408584DC2B2912675BF5B9E582928""" + ), + 16, +) +_q = int( + remove_whitespace( + """ + 8CB91E82A3386D280F5D6F7E50E641DF152F7109ED5456B31F166E6CAC0425 + A7CF3AB6AF6B7FC3103B883202E9046565""" + ), + 16, +) + +curve_brainpoolp384t1 = ellipticcurve.CurveFp(_p, _a, _b, 1) +generator_brainpoolp384t1 = ellipticcurve.PointJacobi( + curve_brainpoolp384t1, _Gx, _Gy, 1, _q, generator=True +) + # Brainpool P-512-r1 _a = int( remove_whitespace( @@ -752,3 +1037,58 @@ def point_is_valid(generator, x, y): generator_brainpoolp512r1 = ellipticcurve.PointJacobi( curve_brainpoolp512r1, _Gx, _Gy, 1, _q, generator=True ) + +# Brainpool P-512-t1 +_a = int( + remove_whitespace( + """ + AADD9DB8DBE9C48B3FD4E6AE33C9FC07CB308DB3B3C9D20ED6639CCA703308 + 717D4D9B009BC66842AECDA12AE6A380E62881FF2F2D82C68528AA6056583A48F0""" + ), + 16, +) +_b = int( + remove_whitespace( + """ + 7CBBBCF9441CFAB76E1890E46884EAE321F70C0BCB4981527897504BEC3E36 + A62BCDFA2304976540F6450085F2DAE145C22553B465763689180EA2571867423E""" + ), + 16, +) +# _z = int( +# remove_whitespace( +# """ +# 
12EE58E6764838B69782136F0F2D3BA06E27695716054092E60A80BEDB212B +# 64E585D90BCE13761F85C3F1D2A64E3BE8FEA2220F01EBA5EEB0F35DBD29D922AB""" +# ), +# 16, +# ) +_Gx = int( + remove_whitespace( + """ + 640ECE5C12788717B9C1BA06CBC2A6FEBA85842458C56DDE9DB1758D39C031 + 3D82BA51735CDB3EA499AA77A7D6943A64F7A3F25FE26F06B51BAA2696FA9035DA""" + ), + 16, +) +_Gy = int( + remove_whitespace( + """ + 5B534BD595F5AF0FA2C892376C84ACE1BB4E3019B71634C01131159CAE03CE + E9D9932184BEEF216BD71DF2DADF86A627306ECFF96DBB8BACE198B61E00F8B332""" + ), + 16, +) +_q = int( + remove_whitespace( + """ + AADD9DB8DBE9C48B3FD4E6AE33C9FC07CB308DB3B3C9D20ED6639CCA703308 + 70553E5C414CA92619418661197FAC10471DB1D381085DDADDB58796829CA90069""" + ), + 16, +) + +curve_brainpoolp512t1 = ellipticcurve.CurveFp(_p, _a, _b, 1) +generator_brainpoolp512t1 = ellipticcurve.PointJacobi( + curve_brainpoolp512t1, _Gx, _Gy, 1, _q, generator=True +) diff --git a/src/ecdsa/eddsa.py b/src/ecdsa/eddsa.py new file mode 100644 index 00000000..9769cfd8 --- /dev/null +++ b/src/ecdsa/eddsa.py @@ -0,0 +1,252 @@ +"""Implementation of Edwards Digital Signature Algorithm.""" + +import hashlib +from ._sha3 import shake_256 +from . import ellipticcurve +from ._compat import ( + remove_whitespace, + bit_length, + bytes_to_int, + int_to_bytes, + compat26_str, +) + +# edwards25519, defined in RFC7748 +_p = 2**255 - 19 +_a = -1 +_d = int( + remove_whitespace( + "370957059346694393431380835087545651895421138798432190163887855330" + "85940283555" + ) +) +_h = 8 + +_Gx = int( + remove_whitespace( + "151122213495354007725011514095885315114540126930418572060461132" + "83949847762202" + ) +) +_Gy = int( + remove_whitespace( + "463168356949264781694283940034751631413079938662562256157830336" + "03165251855960" + ) +) +_r = 2**252 + 0x14DEF9DEA2F79CD65812631A5CF5D3ED + + +def _sha512(data): + return hashlib.new("sha512", compat26_str(data)).digest() + + +curve_ed25519 = ellipticcurve.CurveEdTw(_p, _a, _d, _h, _sha512) +generator_ed25519 = ellipticcurve.PointEdwards( + curve_ed25519, _Gx, _Gy, 1, _Gx * _Gy % _p, _r, generator=True +) + + +# edwards448, defined in RFC7748 +_p = 2**448 - 2**224 - 1 +_a = 1 +_d = -39081 % _p +_h = 4 + +_Gx = int( + remove_whitespace( + "224580040295924300187604334099896036246789641632564134246125461" + "686950415467406032909029192869357953282578032075146446173674602635" + "247710" + ) +) +_Gy = int( + remove_whitespace( + "298819210078481492676017930443930673437544040154080242095928241" + "372331506189835876003536878655418784733982303233503462500531545062" + "832660" + ) +) +_r = 2**446 - 0x8335DC163BB124B65129C96FDE933D8D723A70AADC873D6D54A7BB0D + + +def _shake256(data): + return shake_256(data, 114) + + +curve_ed448 = ellipticcurve.CurveEdTw(_p, _a, _d, _h, _shake256) +generator_ed448 = ellipticcurve.PointEdwards( + curve_ed448, _Gx, _Gy, 1, _Gx * _Gy % _p, _r, generator=True +) + + +class PublicKey(object): + """Public key for the Edwards Digital Signature Algorithm.""" + + def __init__(self, generator, public_key, public_point=None): + self.generator = generator + self.curve = generator.curve() + self.__encoded = public_key + # plus one for the sign bit and round up + self.baselen = (bit_length(self.curve.p()) + 1 + 7) // 8 + if len(public_key) != self.baselen: + raise ValueError( + "Incorrect size of the public key, expected: {0} bytes".format( + self.baselen + ) + ) + if public_point: + self.__point = public_point + else: + self.__point = ellipticcurve.PointEdwards.from_bytes( + self.curve, public_key + ) + + def __eq__(self, other): + if 
isinstance(other, PublicKey): + return ( + self.curve == other.curve and self.__encoded == other.__encoded + ) + return NotImplemented + + def __ne__(self, other): + return not self == other + + @property + def point(self): + return self.__point + + @point.setter + def point(self, other): + if self.__point != other: + raise ValueError("Can't change the coordinates of the point") + self.__point = other + + def public_point(self): + return self.__point + + def public_key(self): + return self.__encoded + + def verify(self, data, signature): + """Verify a Pure EdDSA signature over data.""" + data = compat26_str(data) + if len(signature) != 2 * self.baselen: + raise ValueError( + "Invalid signature length, expected: {0} bytes".format( + 2 * self.baselen + ) + ) + R = ellipticcurve.PointEdwards.from_bytes( + self.curve, signature[: self.baselen] + ) + S = bytes_to_int(signature[self.baselen :], "little") + if S >= self.generator.order(): + raise ValueError("Invalid signature") + + dom = bytearray() + if self.curve == curve_ed448: + dom = bytearray(b"SigEd448" + b"\x00\x00") + + k = bytes_to_int( + self.curve.hash_func(dom + R.to_bytes() + self.__encoded + data), + "little", + ) + + if self.generator * S != self.__point * k + R: + raise ValueError("Invalid signature") + + return True + + +class PrivateKey(object): + """Private key for the Edwards Digital Signature Algorithm.""" + + def __init__(self, generator, private_key): + self.generator = generator + self.curve = generator.curve() + # plus one for the sign bit and round up + self.baselen = (bit_length(self.curve.p()) + 1 + 7) // 8 + if len(private_key) != self.baselen: + raise ValueError( + "Incorrect size of private key, expected: {0} bytes".format( + self.baselen + ) + ) + self.__private_key = bytes(private_key) + self.__h = bytearray(self.curve.hash_func(private_key)) + self.__public_key = None + + a = self.__h[: self.baselen] + a = self._key_prune(a) + scalar = bytes_to_int(a, "little") + self.__s = scalar + + @property + def private_key(self): + return self.__private_key + + def __eq__(self, other): + if isinstance(other, PrivateKey): + return ( + self.curve == other.curve + and self.__private_key == other.__private_key + ) + return NotImplemented + + def __ne__(self, other): + return not self == other + + def _key_prune(self, key): + # make sure the key is not in a small subgroup + h = self.curve.cofactor() + if h == 4: + h_log = 2 + elif h == 8: + h_log = 3 + else: + raise ValueError("Only cofactor 4 and 8 curves supported") + key[0] &= ~((1 << h_log) - 1) + + # ensure the highest bit is set but no higher + l = bit_length(self.curve.p()) + if l % 8 == 0: + key[-1] = 0 + key[-2] |= 0x80 + else: + key[-1] = key[-1] & (1 << (l % 8)) - 1 | 1 << (l % 8) - 1 + return key + + def public_key(self): + """Generate the public key based on the included private key""" + if self.__public_key: + return self.__public_key + + public_point = self.generator * self.__s + + self.__public_key = PublicKey( + self.generator, public_point.to_bytes(), public_point + ) + + return self.__public_key + + def sign(self, data): + """Perform a Pure EdDSA signature over data.""" + data = compat26_str(data) + A = self.public_key().public_key() + + prefix = self.__h[self.baselen :] + + dom = bytearray() + if self.curve == curve_ed448: + dom = bytearray(b"SigEd448" + b"\x00\x00") + + r = bytes_to_int(self.curve.hash_func(dom + prefix + data), "little") + R = (self.generator * r).to_bytes() + + k = bytes_to_int(self.curve.hash_func(dom + R + A + data), "little") + k %= 
self.generator.order() + + S = (r + k * self.__s) % self.generator.order() + + return R + int_to_bytes(S, self.baselen, "little") diff --git a/src/ecdsa/ellipticcurve.py b/src/ecdsa/ellipticcurve.py index d94e3ead..a982c1e4 100644 --- a/src/ecdsa/ellipticcurve.py +++ b/src/ecdsa/ellipticcurve.py @@ -25,13 +25,12 @@ # Signature checking (5.4.2): # - Verify that r and s are in [1,n-1]. # -# Version of 2008.11.25. -# # Revision history: # 2005.12.31 - Initial version. # 2008.11.25 - Change CurveFp.is_on to contains_point. # # Written in 2005 by Peter Pearson and placed in the public domain. +# Modified extensively as part of python-ecdsa. from __future__ import division @@ -39,7 +38,7 @@ from gmpy2 import mpz GMPY = True -except ImportError: +except ImportError: # pragma: no branch try: from gmpy import mpz @@ -50,14 +49,19 @@ from six import python_2_unicode_compatible from . import numbertheory -from ._rwlock import RWLock +from ._compat import normalise_bytes, int_to_bytes, bit_length, bytes_to_int +from .errors import MalformedPointError +from .util import orderlen, string_to_number, number_to_string @python_2_unicode_compatible class CurveFp(object): - """Elliptic Curve over the field of integers modulo a prime.""" + """ + :term:`Short Weierstrass Elliptic Curve ` over a + prime field. + """ - if GMPY: + if GMPY: # pragma: no branch def __init__(self, p, a, b, h=None): """ @@ -75,7 +79,7 @@ def __init__(self, p, a, b, h=None): # gmpy with it self.__h = h - else: + else: # pragma: no branch def __init__(self, p, a, b, h=None): """ @@ -92,17 +96,25 @@ def __init__(self, p, a, b, h=None): self.__h = h def __eq__(self, other): + """Return True if other is an identical curve, False otherwise. + + Note: the value of the cofactor of the curve is not taken into account + when comparing curves, as it's derived from the base point and + intrinsic curve characteristic (but it's complex to compute), + only the prime and curve parameters are considered. + """ if isinstance(other, CurveFp): - """Return True if the curves are identical, False otherwise.""" + p = self.__p return ( self.__p == other.__p - and self.__a == other.__a - and self.__b == other.__b + and self.__a % p == other.__a % p + and self.__b % p == other.__b % p ) return NotImplemented def __ne__(self, other): - return not (self == other) + """Return False if other is an identical curve, True otherwise.""" + return not self == other def __hash__(self): return hash((self.__p, self.__a, self.__b)) @@ -124,17 +136,376 @@ def contains_point(self, x, y): return (y * y - ((x * x + self.__a) * x + self.__b)) % self.__p == 0 def __str__(self): - return "CurveFp(p=%d, a=%d, b=%d, h=%d)" % ( + if self.__h is not None: + return "CurveFp(p={0}, a={1}, b={2}, h={3})".format( + self.__p, + self.__a, + self.__b, + self.__h, + ) + return "CurveFp(p={0}, a={1}, b={2})".format( self.__p, self.__a, self.__b, - self.__h, ) -class PointJacobi(object): +class CurveEdTw(object): + """Parameters for a Twisted Edwards Elliptic Curve""" + + if GMPY: # pragma: no branch + + def __init__(self, p, a, d, h=None, hash_func=None): + """ + The curve of points satisfying a*x^2 + y^2 = 1 + d*x^2*y^2 (mod p). + + h is the cofactor of the curve. 
+ hash_func is the hash function associated with the curve + (like SHA-512 for Ed25519) + """ + self.__p = mpz(p) + self.__a = mpz(a) + self.__d = mpz(d) + self.__h = h + self.__hash_func = hash_func + + else: + + def __init__(self, p, a, d, h=None, hash_func=None): + """ + The curve of points satisfying a*x^2 + y^2 = 1 + d*x^2*y^2 (mod p). + + h is the cofactor of the curve. + hash_func is the hash function associated with the curve + (like SHA-512 for Ed25519) + """ + self.__p = p + self.__a = a + self.__d = d + self.__h = h + self.__hash_func = hash_func + + def __eq__(self, other): + """Returns True if other is an identical curve.""" + if isinstance(other, CurveEdTw): + p = self.__p + return ( + self.__p == other.__p + and self.__a % p == other.__a % p + and self.__d % p == other.__d % p + ) + return NotImplemented + + def __ne__(self, other): + """Return False if the other is an identical curve, True otherwise.""" + return not self == other + + def __hash__(self): + return hash((self.__p, self.__a, self.__d)) + + def contains_point(self, x, y): + """Is the point (x, y) on this curve?""" + return ( + self.__a * x * x + y * y - 1 - self.__d * x * x * y * y + ) % self.__p == 0 + + def p(self): + return self.__p + + def a(self): + return self.__a + + def d(self): + return self.__d + + def hash_func(self, data): + return self.__hash_func(data) + + def cofactor(self): + return self.__h + + def __str__(self): + if self.__h is not None: + return "CurveEdTw(p={0}, a={1}, d={2}, h={3})".format( + self.__p, + self.__a, + self.__d, + self.__h, + ) + return "CurveEdTw(p={0}, a={1}, d={2})".format( + self.__p, + self.__a, + self.__d, + ) + + +class AbstractPoint(object): + """Class for common methods of elliptic curve points.""" + + @staticmethod + def _from_raw_encoding(data, raw_encoding_length): + """ + Decode public point from :term:`raw encoding`. + + :term:`raw encoding` is the same as the :term:`uncompressed` encoding, + but without the 0x04 byte at the beginning. 
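As an aside, a minimal standalone sketch of the raw-to-coordinates split performed by _from_raw_encoding() (Python 3, hypothetical 32-byte coordinates):

# split a raw encoding into halves and parse each half as a big-endian integer
raw = (4).to_bytes(32, "big") + (9).to_bytes(32, "big")
half = len(raw) // 2
coord_x = int.from_bytes(raw[:half], "big")
coord_y = int.from_bytes(raw[half:], "big")
assert (coord_x, coord_y) == (4, 9)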
+ """ + # real assert, from_bytes() should not call us with different length + assert len(data) == raw_encoding_length + xs = data[: raw_encoding_length // 2] + ys = data[raw_encoding_length // 2 :] + # real assert, raw_encoding_length is calculated by multiplying an + # integer by two so it will always be even + assert len(xs) == raw_encoding_length // 2 + assert len(ys) == raw_encoding_length // 2 + coord_x = string_to_number(xs) + coord_y = string_to_number(ys) + + return coord_x, coord_y + + @staticmethod + def _from_compressed(data, curve): + """Decode public point from compressed encoding.""" + if data[:1] not in (b"\x02", b"\x03"): + raise MalformedPointError("Malformed compressed point encoding") + + is_even = data[:1] == b"\x02" + x = string_to_number(data[1:]) + p = curve.p() + alpha = (pow(x, 3, p) + (curve.a() * x) + curve.b()) % p + try: + beta = numbertheory.square_root_mod_prime(alpha, p) + except numbertheory.Error as e: + raise MalformedPointError( + "Encoding does not correspond to a point on curve", e + ) + if is_even == bool(beta & 1): + y = p - beta + else: + y = beta + return x, y + + @classmethod + def _from_hybrid(cls, data, raw_encoding_length, validate_encoding): + """Decode public point from hybrid encoding.""" + # real assert, from_bytes() should not call us with different types + assert data[:1] in (b"\x06", b"\x07") + + # primarily use the uncompressed as it's easiest to handle + x, y = cls._from_raw_encoding(data[1:], raw_encoding_length) + + # but validate if it's self-consistent if we're asked to do that + if validate_encoding and ( + y & 1 + and data[:1] != b"\x07" + or (not y & 1) + and data[:1] != b"\x06" + ): + raise MalformedPointError("Inconsistent hybrid point encoding") + + return x, y + + @classmethod + def _from_edwards(cls, curve, data): + """Decode a point on an Edwards curve.""" + data = bytearray(data) + p = curve.p() + # add 1 for the sign bit and then round up + exp_len = (bit_length(p) + 1 + 7) // 8 + if len(data) != exp_len: + raise MalformedPointError("Point length doesn't match the curve.") + x_0 = (data[-1] & 0x80) >> 7 + + data[-1] &= 0x80 - 1 + + y = bytes_to_int(data, "little") + if GMPY: + y = mpz(y) + + x2 = ( + (y * y - 1) + * numbertheory.inverse_mod(curve.d() * y * y - curve.a(), p) + % p + ) + + try: + x = numbertheory.square_root_mod_prime(x2, p) + except numbertheory.Error as e: + raise MalformedPointError( + "Encoding does not correspond to a point on curve", e + ) + + if x % 2 != x_0: + x = -x % p + + return x, y + + @classmethod + def from_bytes( + cls, curve, data, validate_encoding=True, valid_encodings=None + ): + """ + Initialise the object from byte encoding of a point. + + The method does accept and automatically detect the type of point + encoding used. It supports the :term:`raw encoding`, + :term:`uncompressed`, :term:`compressed`, and :term:`hybrid` encodings. + + Note: generally you will want to call the ``from_bytes()`` method of + either a child class, PointJacobi or Point. 
+ + :param data: single point encoding of the public key + :type data: :term:`bytes-like object` + :param curve: the curve on which the public key is expected to lay + :type curve: ~ecdsa.ellipticcurve.CurveFp + :param validate_encoding: whether to verify that the encoding of the + point is self-consistent, defaults to True, has effect only + on ``hybrid`` encoding + :type validate_encoding: bool + :param valid_encodings: list of acceptable point encoding formats, + supported ones are: :term:`uncompressed`, :term:`compressed`, + :term:`hybrid`, and :term:`raw encoding` (specified with ``raw`` + name). All formats by default (specified with ``None``). + :type valid_encodings: :term:`set-like object` + + :raises `~ecdsa.errors.MalformedPointError`: if the public point does + not lay on the curve or the encoding is invalid + + :return: x and y coordinates of the encoded point + :rtype: tuple(int, int) + """ + if not valid_encodings: + valid_encodings = set( + ["uncompressed", "compressed", "hybrid", "raw"] + ) + if not all( + i in set(("uncompressed", "compressed", "hybrid", "raw")) + for i in valid_encodings + ): + raise ValueError( + "Only uncompressed, compressed, hybrid or raw encoding " + "supported." + ) + data = normalise_bytes(data) + + if isinstance(curve, CurveEdTw): + return cls._from_edwards(curve, data) + + key_len = len(data) + raw_encoding_length = 2 * orderlen(curve.p()) + if key_len == raw_encoding_length and "raw" in valid_encodings: + coord_x, coord_y = cls._from_raw_encoding( + data, raw_encoding_length + ) + elif key_len == raw_encoding_length + 1 and ( + "hybrid" in valid_encodings or "uncompressed" in valid_encodings + ): + if data[:1] in (b"\x06", b"\x07") and "hybrid" in valid_encodings: + coord_x, coord_y = cls._from_hybrid( + data, raw_encoding_length, validate_encoding + ) + elif data[:1] == b"\x04" and "uncompressed" in valid_encodings: + coord_x, coord_y = cls._from_raw_encoding( + data[1:], raw_encoding_length + ) + else: + raise MalformedPointError( + "Invalid X9.62 encoding of the public point" + ) + elif ( + key_len == raw_encoding_length // 2 + 1 + and "compressed" in valid_encodings + ): + coord_x, coord_y = cls._from_compressed(data, curve) + else: + raise MalformedPointError( + "Length of string does not match lengths of " + "any of the enabled ({0}) encodings of the " + "curve.".format(", ".join(valid_encodings)) + ) + return coord_x, coord_y + + def _raw_encode(self): + """Convert the point to the :term:`raw encoding`.""" + prime = self.curve().p() + x_str = number_to_string(self.x(), prime) + y_str = number_to_string(self.y(), prime) + return x_str + y_str + + def _compressed_encode(self): + """Encode the point into the compressed form.""" + prime = self.curve().p() + x_str = number_to_string(self.x(), prime) + if self.y() & 1: + return b"\x03" + x_str + return b"\x02" + x_str + + def _hybrid_encode(self): + """Encode the point into the hybrid form.""" + raw_enc = self._raw_encode() + if self.y() & 1: + return b"\x07" + raw_enc + return b"\x06" + raw_enc + + def _edwards_encode(self): + """Encode the point according to RFC8032 encoding.""" + self.scale() + x, y, p = self.x(), self.y(), self.curve().p() + + # add 1 for the sign bit and then round up + enc_len = (bit_length(p) + 1 + 7) // 8 + y_str = int_to_bytes(y, enc_len, "little") + if x % 2: + y_str[-1] |= 0x80 + return y_str + + def to_bytes(self, encoding="raw"): + """ + Convert the point to a byte string. + + The method by default uses the :term:`raw encoding` (specified + by `encoding="raw"`. 
It can also output points in :term:`uncompressed`, + :term:`compressed`, and :term:`hybrid` formats. + + For points on Edwards curves `encoding` is ignored and only the + encoding defined in RFC 8032 is supported. + + :return: :term:`raw encoding` of a public on the curve + :rtype: bytes + """ + assert encoding in ("raw", "uncompressed", "compressed", "hybrid") + curve = self.curve() + if isinstance(curve, CurveEdTw): + return self._edwards_encode() + elif encoding == "raw": + return self._raw_encode() + elif encoding == "uncompressed": + return b"\x04" + self._raw_encode() + elif encoding == "hybrid": + return self._hybrid_encode() + else: + return self._compressed_encode() + + @staticmethod + def _naf(mult): + """Calculate non-adjacent form of number.""" + ret = [] + while mult: + if mult % 2: + nd = mult % 4 + if nd >= 2: + nd -= 4 + ret.append(nd) + mult -= nd + else: + ret.append(0) + mult //= 2 + return ret + + +class PointJacobi(AbstractPoint): """ - Point on an elliptic curve. Uses Jacobi coordinates. + Point on a short Weierstrass elliptic curve. Uses Jacobi coordinates. In Jacobian coordinates, there are three parameters, X, Y and Z. They correspond to affine parameters 'x' and 'y' like so: @@ -158,66 +529,115 @@ def __init__(self, curve, x, y, z, order=None, generator=False): generator=True :param bool generator: the point provided is a curve generator, as such, it will be commonly used with scalar multiplication. This will - cause to precompute multiplication table for it + cause to precompute multiplication table generation for it """ + super(PointJacobi, self).__init__() self.__curve = curve - # since it's generally better (faster) to use scaled points vs unscaled - # ones, use writer-biased RWLock for locking: - self._scale_lock = RWLock() - if GMPY: - self.__x = mpz(x) - self.__y = mpz(y) - self.__z = mpz(z) + if GMPY: # pragma: no branch + self.__coords = (mpz(x), mpz(y), mpz(z)) self.__order = order and mpz(order) - else: - self.__x = x - self.__y = y - self.__z = z + else: # pragma: no branch + self.__coords = (x, y, z) self.__order = order + self.__generator = generator self.__precompute = [] - if generator: - assert order - i = 1 - order *= 2 - doubler = PointJacobi(curve, x, y, z, order) - order *= 2 - self.__precompute.append((doubler.x(), doubler.y())) - - while i < order: - i *= 2 - doubler = doubler.double().scale() - self.__precompute.append((doubler.x(), doubler.y())) + + @classmethod + def from_bytes( + cls, + curve, + data, + validate_encoding=True, + valid_encodings=None, + order=None, + generator=False, + ): + """ + Initialise the object from byte encoding of a point. + + The method does accept and automatically detect the type of point + encoding used. It supports the :term:`raw encoding`, + :term:`uncompressed`, :term:`compressed`, and :term:`hybrid` encodings. + + :param data: single point encoding of the public key + :type data: :term:`bytes-like object` + :param curve: the curve on which the public key is expected to lay + :type curve: ~ecdsa.ellipticcurve.CurveFp + :param validate_encoding: whether to verify that the encoding of the + point is self-consistent, defaults to True, has effect only + on ``hybrid`` encoding + :type validate_encoding: bool + :param valid_encodings: list of acceptable point encoding formats, + supported ones are: :term:`uncompressed`, :term:`compressed`, + :term:`hybrid`, and :term:`raw encoding` (specified with ``raw`` + name). All formats by default (specified with ``None``). 
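A small self-contained sketch of the non-adjacent form produced by the _naf() helper above; digits are stored least-significant first and drawn from {-1, 0, 1}, so 7 becomes [-1, 0, 0, 1], i.e. 8 - 1:

def naf(mult):
    # mirror of AbstractPoint._naf(), shown standalone for illustration
    ret = []
    while mult:
        if mult % 2:
            nd = mult % 4
            if nd >= 2:
                nd -= 4
            ret.append(nd)
            mult -= nd
        else:
            ret.append(0)
        mult //= 2
    return ret

assert naf(7) == [-1, 0, 0, 1]
assert sum(d * 2**i for i, d in enumerate(naf(7))) == 7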
+ :type valid_encodings: :term:`set-like object` + :param int order: the point order, must be non zero when using + generator=True + :param bool generator: the point provided is a curve generator, as + such, it will be commonly used with scalar multiplication. This + will cause to precompute multiplication table generation for it + + :raises `~ecdsa.errors.MalformedPointError`: if the public point does + not lay on the curve or the encoding is invalid + + :return: Point on curve + :rtype: PointJacobi + """ + coord_x, coord_y = super(PointJacobi, cls).from_bytes( + curve, data, validate_encoding, valid_encodings + ) + return PointJacobi(curve, coord_x, coord_y, 1, order, generator) + + def _maybe_precompute(self): + if not self.__generator or self.__precompute: + return + + # since this code will execute just once, and it's fully deterministic, + # depend on atomicity of the last assignment to switch from empty + # self.__precompute to filled one and just ignore the unlikely + # situation when two threads execute it at the same time (as it won't + # lead to inconsistent __precompute) + order = self.__order + assert order + precompute = [] + i = 1 + order *= 2 + coord_x, coord_y, coord_z = self.__coords + doubler = PointJacobi(self.__curve, coord_x, coord_y, coord_z, order) + order *= 2 + precompute.append((doubler.x(), doubler.y())) + + while i < order: + i *= 2 + doubler = doubler.double().scale() + precompute.append((doubler.x(), doubler.y())) + + self.__precompute = precompute def __getstate__(self): - try: - self._scale_lock.reader_acquire() - state = self.__dict__.copy() - finally: - self._scale_lock.reader_release() - del state["_scale_lock"] + # while this code can execute at the same time as _maybe_precompute() + # is updating the __precompute or scale() is updating the __coords, + # there is no requirement for consistency between __coords and + # __precompute + state = self.__dict__.copy() return state def __setstate__(self, state): self.__dict__.update(state) - self._scale_lock = RWLock() def __eq__(self, other): - """Compare two points with each-other.""" - try: - self._scale_lock.reader_acquire() - if other is INFINITY: - return not self.__y or not self.__z - x1, y1, z1 = self.__x, self.__y, self.__z - finally: - self._scale_lock.reader_release() + """Compare for equality two points with each-other. + + Note: only points that lay on the same curve can be equal. + """ + x1, y1, z1 = self.__coords + if other is INFINITY: + return not z1 if isinstance(other, Point): x2, y2, z2 = other.x(), other.y(), 1 elif isinstance(other, PointJacobi): - try: - other._scale_lock.reader_acquire() - x2, y2, z2 = other.__x, other.__y, other.__z - finally: - other._scale_lock.reader_release() + x2, y2, z2 = other.__coords else: return NotImplemented if self.__curve != other.curve(): @@ -234,6 +654,10 @@ def __eq__(self, other): y1 * zz2 * z2 - y2 * zz1 * z1 ) % p == 0 + def __ne__(self, other): + """Compare for inequality two points with each-other.""" + return not self == other + def order(self): """Return the order of the point. @@ -254,17 +678,12 @@ def x(self): call x() and y() on the returned instance. Or call `scale()` and then x() and y() on the returned instance. 
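A hedged sketch of the affine conversion that x(), y() and scale() perform for Jacobian coordinates, using a toy prime and a hypothetical scaling factor (pow(Z, -1, p) needs Python 3.8+):

p = 23
x_aff, y_aff = 3, 10                    # point on y^2 = x^3 + x + 1 mod 23
Z = 2                                   # arbitrary non-zero scaling factor
X, Y = x_aff * Z * Z % p, y_aff * Z**3 % p
z_inv = pow(Z, -1, p)                   # modular inverse of Z
assert X * z_inv**2 % p == x_aff        # x = X / Z^2
assert Y * z_inv**3 % p == y_aff        # y = Y / Z^3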
""" - try: - self._scale_lock.reader_acquire() - if self.__z == 1: - return self.__x - x = self.__x - z = self.__z - finally: - self._scale_lock.reader_release() + x, _, z = self.__coords + if z == 1: + return x p = self.__curve.p() z = numbertheory.inverse_mod(z, p) - return x * z ** 2 % p + return x * z**2 % p def y(self): """ @@ -275,17 +694,12 @@ def y(self): call x() and y() on the returned instance. Or call `scale()` and then x() and y() on the returned instance. """ - try: - self._scale_lock.reader_acquire() - if self.__z == 1: - return self.__y - y = self.__y - z = self.__z - finally: - self._scale_lock.reader_release() + _, y, z = self.__coords + if z == 1: + return y p = self.__curve.p() z = numbertheory.inverse_mod(z, p) - return y * z ** 3 % p + return y * z**3 % p def scale(self): """ @@ -293,37 +707,30 @@ def scale(self): Modifies point in place, returns self. """ - try: - self._scale_lock.reader_acquire() - if self.__z == 1: - return self - finally: - self._scale_lock.reader_release() + x, y, z = self.__coords + if z == 1: + return self - try: - self._scale_lock.writer_acquire() - # scaling already scaled point is safe (as inverse of 1 is 1) and - # quick so we don't need to optimise for the unlikely event when - # two threads hit the lock at the same time - p = self.__curve.p() - z_inv = numbertheory.inverse_mod(self.__z, p) - zz_inv = z_inv * z_inv % p - self.__x = self.__x * zz_inv % p - self.__y = self.__y * zz_inv * z_inv % p - # we are setting the z last so that the check above will return - # true only after all values were already updated - self.__z = 1 - finally: - self._scale_lock.writer_release() + # scaling is deterministic, so even if two threads execute the below + # code at the same time, they will set __coords to the same value + p = self.__curve.p() + z_inv = numbertheory.inverse_mod(z, p) + zz_inv = z_inv * z_inv % p + x = x * zz_inv % p + y = y * zz_inv * z_inv % p + self.__coords = (x, y, 1) return self def to_affine(self): """Return point in affine form.""" - if not self.__y or not self.__z: + _, _, z = self.__coords + p = self.__curve.p() + if not (z % p): return INFINITY self.scale() - # after point is scaled, it's immutable, so no need to perform locking - return Point(self.__curve, self.__x, self.__y, self.__order) + x, y, z = self.__coords + assert z == 1 + return Point(self.__curve, x, y, self.__order) @staticmethod def from_affine(point, generator=False): @@ -337,7 +744,8 @@ def from_affine(point, generator=False): point.curve(), point.x(), point.y(), 1, point.order(), generator ) - # plese note that all the methods that use the equations from hyperelliptic + # please note that all the methods that use the equations from + # hyperelliptic # are formatted in a way to maximise performance. 
# Things that make code faster: multiplying instead of taking to the power # (`xx = x * x; xxxx = xx * xx % p` is faster than `xxxx = x**4 % p` and @@ -353,7 +761,7 @@ def _double_with_z_1(self, X1, Y1, p, a): # http://hyperelliptic.org/EFD/g1p/auto-shortw-jacobian.html#doubling-mdbl-2007-bl XX, YY = X1 * X1 % p, Y1 * Y1 % p if not YY: - return 0, 0, 1 + return 0, 0, 0 YYYY = YY * YY % p S = 2 * ((X1 + YY) ** 2 - XX - YYYY) % p M = 3 * XX + a @@ -368,12 +776,12 @@ def _double(self, X1, Y1, Z1, p, a): if Z1 == 1: return self._double_with_z_1(X1, Y1, p, a) if not Z1: - return 0, 0, 1 + return 0, 0, 0 # after: # http://hyperelliptic.org/EFD/g1p/auto-shortw-jacobian.html#doubling-dbl-2007-bl XX, YY = X1 * X1 % p, Y1 * Y1 % p if not YY: - return 0, 0, 1 + return 0, 0, 0 YYYY = YY * YY % p ZZ = Z1 * Z1 % p S = 2 * ((X1 + YY) ** 2 - XX - YYYY) % p @@ -387,20 +795,16 @@ def _double(self, X1, Y1, Z1, p, a): def double(self): """Add a point to itself.""" - if not self.__y: + X1, Y1, Z1 = self.__coords + + if not Z1: return INFINITY p, a = self.__curve.p(), self.__curve.a() - try: - self._scale_lock.reader_acquire() - X1, Y1, Z1 = self.__x, self.__y, self.__z - finally: - self._scale_lock.reader_release() - X3, Y3, Z3 = self._double(X1, Y1, Z1, p, a) - if not Y3 or not Z3: + if not Z3: return INFINITY return PointJacobi(self.__curve, X3, Y3, Z3, self.__order) @@ -416,7 +820,7 @@ def _add_with_z_1(self, X1, Y1, X2, Y2, p): if not H and not r: return self._double_with_z_1(X1, Y1, p, self.__curve.a()) V = X1 * I - X3 = (r ** 2 - J - 2 * V) % p + X3 = (r**2 - J - 2 * V) % p Y3 = (r * (V - X3) - 2 * Y1 * J) % p Z3 = 2 * H % p return X3, Y3, Z3 @@ -484,10 +888,10 @@ def __radd__(self, other): def _add(self, X1, Y1, Z1, X2, Y2, Z2, p): """add two points, select fastest method.""" - if not Y1 or not Z1: - return X2, Y2, Z2 - if not Y2 or not Z2: - return X1, Y1, Z1 + if not Z1: + return X2 % p, Y2 % p, Z2 % p + if not Z2: + return X1 % p, Y1 % p, Z1 % p if Z1 == Z2: if Z1 == 1: return self._add_with_z_1(X1, Y1, X2, Y2, p) @@ -510,19 +914,12 @@ def __add__(self, other): raise ValueError("The other point is on different curve") p = self.__curve.p() - try: - self._scale_lock.reader_acquire() - X1, Y1, Z1 = self.__x, self.__y, self.__z - finally: - self._scale_lock.reader_release() - try: - other._scale_lock.reader_acquire() - X2, Y2, Z2 = other.__x, other.__y, other.__z - finally: - other._scale_lock.reader_release() + X1, Y1, Z1 = self.__coords + X2, Y2, Z2 = other.__coords + X3, Y3, Z3 = self._add(X1, Y1, Z1, X2, Y2, Z2, p) - if not Y3 or not Z3: + if not Z3: return INFINITY return PointJacobi(self.__curve, X3, Y3, Z3, self.__order) @@ -532,7 +929,7 @@ def __rmul__(self, other): def _mul_precompute(self, other): """Multiply point by integer with precomputation table.""" - X3, Y3, Z3, p = 0, 0, 1, self.__curve.p() + X3, Y3, Z3, p = 0, 0, 0, self.__curve.p() _add = self._add for X2, Y2 in self.__precompute: if other % 2: @@ -545,42 +942,26 @@ def _mul_precompute(self, other): else: other //= 2 - if not Y3 or not Z3: + if not Z3: return INFINITY return PointJacobi(self.__curve, X3, Y3, Z3, self.__order) - @staticmethod - def _naf(mult): - """Calculate non-adjacent form of number.""" - ret = [] - while mult: - if mult % 2: - nd = mult % 4 - if nd >= 2: - nd = nd - 4 - ret += [nd] - mult -= nd - else: - ret += [0] - mult //= 2 - return ret - def __mul__(self, other): """Multiply point by an integer.""" - if not self.__y or not other: + if not self.__coords[1] or not other: return INFINITY if other == 1: return 
self if self.__order: # order*2 as a protection for Minerva other = other % (self.__order * 2) + self._maybe_precompute() if self.__precompute: return self._mul_precompute(other) self = self.scale() - # once scaled, point is immutable, not need to lock - X2, Y2 = self.__x, self.__y - X3, Y3, Z3 = 0, 0, 1 + X2, Y2, _ = self.__coords + X3, Y3, Z3 = 0, 0, 0 p, a = self.__curve.p(), self.__curve.a() _double = self._double _add = self._add @@ -593,34 +974,27 @@ def __mul__(self, other): elif i > 0: X3, Y3, Z3 = _add(X3, Y3, Z3, X2, Y2, 1, p) - if not Y3 or not Z3: + if not Z3: return INFINITY return PointJacobi(self.__curve, X3, Y3, Z3, self.__order) - @staticmethod - def _leftmost_bit(x): - """Return integer with the same magnitude as x but only one bit set""" - assert x > 0 - result = 1 - while result <= x: - result = 2 * result - return result // 2 - def mul_add(self, self_mul, other, other_mul): """ - Do two multiplications at the same time, add results. + Do two multiplications at the same time, add results. - calculates self*self_mul + other*other_mul - """ - if other is INFINITY or other_mul == 0: + calculates self*self_mul + other*other_mul + """ + if other == INFINITY or other_mul == 0: return self * self_mul if self_mul == 0: return other * other_mul if not isinstance(other, PointJacobi): other = PointJacobi.from_affine(other) # when the points have precomputed answers, then multiplying them alone - # is faster (as it uses NAF) + # is faster (as it uses NAF and no point doublings) + self._maybe_precompute() + other._maybe_precompute() if self.__precompute and other.__precompute: return self * self_mul + other * other_mul @@ -628,55 +1002,94 @@ def mul_add(self, self_mul, other, other_mul): self_mul = self_mul % self.__order other_mul = other_mul % self.__order - i = self._leftmost_bit(max(self_mul, other_mul)) * 2 - X3, Y3, Z3 = 0, 0, 1 + # (X3, Y3, Z3) is the accumulator + X3, Y3, Z3 = 0, 0, 0 p, a = self.__curve.p(), self.__curve.a() - self = self.scale() - # after scaling, point is immutable, no need for locking - X1, Y1 = self.__x, self.__y - other = other.scale() - X2, Y2 = other.__x, other.__y - both = self + other - if both is INFINITY: - X4, Y4 = 0, 0 - else: - both.scale() - X4, Y4 = both.__x, both.__y + + # as we have 6 unique points to work with, we can't scale all of them, + # but do scale the ones that are used most often + self.scale() + X1, Y1, Z1 = self.__coords + other.scale() + X2, Y2, Z2 = other.__coords + _double = self._double _add = self._add - while i > 1: + + # with NAF we have 3 options: no add, subtract, add + # so with 2 points, we have 9 combinations: + # 0, -A, +A, -B, -A-B, +A-B, +B, -A+B, +A+B + # so we need 4 combined points: + mAmB_X, mAmB_Y, mAmB_Z = _add(X1, -Y1, Z1, X2, -Y2, Z2, p) + pAmB_X, pAmB_Y, pAmB_Z = _add(X1, Y1, Z1, X2, -Y2, Z2, p) + mApB_X, mApB_Y, mApB_Z = pAmB_X, -pAmB_Y, pAmB_Z + pApB_X, pApB_Y, pApB_Z = mAmB_X, -mAmB_Y, mAmB_Z + # when the self and other sum to infinity, we need to add them + # one by one to get correct result but as that's very unlikely to + # happen in regular operation, we don't need to optimise this case + if not pApB_Z: + return self * self_mul + other * other_mul + + # gmp object creation has cumulatively higher overhead than the + # speedup we get from calculating the NAF using gmp so ensure use + # of int() + self_naf = list(reversed(self._naf(int(self_mul)))) + other_naf = list(reversed(self._naf(int(other_mul)))) + # ensure that the lists are the same length (zip() will truncate + # longer one otherwise) + if 
len(self_naf) < len(other_naf): + self_naf = [0] * (len(other_naf) - len(self_naf)) + self_naf + elif len(self_naf) > len(other_naf): + other_naf = [0] * (len(self_naf) - len(other_naf)) + other_naf + + for A, B in zip(self_naf, other_naf): X3, Y3, Z3 = _double(X3, Y3, Z3, p, a) - i = i // 2 - if self_mul & i and other_mul & i: - X3, Y3, Z3 = _add(X3, Y3, Z3, X4, Y4, 1, p) - elif self_mul & i: - X3, Y3, Z3 = _add(X3, Y3, Z3, X1, Y1, 1, p) - elif other_mul & i: - X3, Y3, Z3 = _add(X3, Y3, Z3, X2, Y2, 1, p) + # conditions ordered from most to least likely + if A == 0: + if B == 0: + pass + elif B < 0: + X3, Y3, Z3 = _add(X3, Y3, Z3, X2, -Y2, Z2, p) + else: + assert B > 0 + X3, Y3, Z3 = _add(X3, Y3, Z3, X2, Y2, Z2, p) + elif A < 0: + if B == 0: + X3, Y3, Z3 = _add(X3, Y3, Z3, X1, -Y1, Z1, p) + elif B < 0: + X3, Y3, Z3 = _add(X3, Y3, Z3, mAmB_X, mAmB_Y, mAmB_Z, p) + else: + assert B > 0 + X3, Y3, Z3 = _add(X3, Y3, Z3, mApB_X, mApB_Y, mApB_Z, p) + else: + assert A > 0 + if B == 0: + X3, Y3, Z3 = _add(X3, Y3, Z3, X1, Y1, Z1, p) + elif B < 0: + X3, Y3, Z3 = _add(X3, Y3, Z3, pAmB_X, pAmB_Y, pAmB_Z, p) + else: + assert B > 0 + X3, Y3, Z3 = _add(X3, Y3, Z3, pApB_X, pApB_Y, pApB_Z, p) - if not Y3 or not Z3: + if not Z3: return INFINITY return PointJacobi(self.__curve, X3, Y3, Z3, self.__order) def __neg__(self): """Return negated point.""" - try: - self._scale_lock.reader_acquire() - return PointJacobi( - self.__curve, self.__x, -self.__y, self.__z, self.__order - ) - finally: - self._scale_lock.reader_release() + x, y, z = self.__coords + return PointJacobi(self.__curve, x, -y, z, self.__order) -class Point(object): - """A point on an elliptic curve. Altering x and y is forbidding, - but they can be read by the x() and y() methods.""" +class Point(AbstractPoint): + """A point on a short Weierstrass elliptic curve. Altering x and y is + forbidden, but they can be read by the x() and y() methods.""" def __init__(self, curve, x, y, order=None): """curve, x, y, order; order (optional) is the order of this point.""" + super(Point, self).__init__() self.__curve = curve if GMPY: self.__x = x and mpz(x) @@ -695,8 +1108,56 @@ def __init__(self, curve, x, y, order=None): if curve and curve.cofactor() != 1 and order: assert self * order == INFINITY + @classmethod + def from_bytes( + cls, + curve, + data, + validate_encoding=True, + valid_encodings=None, + order=None, + ): + """ + Initialise the object from byte encoding of a point. + + The method does accept and automatically detect the type of point + encoding used. It supports the :term:`raw encoding`, + :term:`uncompressed`, :term:`compressed`, and :term:`hybrid` encodings. + + :param data: single point encoding of the public key + :type data: :term:`bytes-like object` + :param curve: the curve on which the public key is expected to lay + :type curve: ~ecdsa.ellipticcurve.CurveFp + :param validate_encoding: whether to verify that the encoding of the + point is self-consistent, defaults to True, has effect only + on ``hybrid`` encoding + :type validate_encoding: bool + :param valid_encodings: list of acceptable point encoding formats, + supported ones are: :term:`uncompressed`, :term:`compressed`, + :term:`hybrid`, and :term:`raw encoding` (specified with ``raw`` + name). All formats by default (specified with ``None``). 
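As a usage note, the mul_add() method above is exactly the shape of computation that ECDSA verification needs (u1*G + u2*Q); a hedged sketch with stand-in scalars:

from ecdsa.curves import NIST256p

G = NIST256p.generator
Q = G * 1234                            # stand-in for a public key point
u1, u2 = 17, 42                         # stand-ins for verification scalars
assert G.mul_add(u1, Q, u2) == G * u1 + Q * u2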
+ :type valid_encodings: :term:`set-like object` + :param int order: the point order, must be non zero when using + generator=True + + :raises `~ecdsa.errors.MalformedPointError`: if the public point does + not lay on the curve or the encoding is invalid + + :return: Point on curve + :rtype: Point + """ + coord_x, coord_y = super(Point, cls).from_bytes( + curve, data, validate_encoding, valid_encodings + ) + return Point(curve, coord_x, coord_y, order) + def __eq__(self, other): - """Return True if the points are identical, False otherwise.""" + """Return True if the points are identical, False otherwise. + + Note: only points that lay on the same curve can be equal. + """ + if other is INFINITY: + return self.__x is None or self.__y is None if isinstance(other, Point): return ( self.__curve == other.__curve @@ -705,6 +1166,10 @@ def __eq__(self, other): ) return NotImplemented + def __ne__(self, other): + """Returns False if points are identical, True otherwise.""" + return not self == other + def __neg__(self): return Point(self.__curve, self.__x, self.__curve.p() - self.__y) @@ -759,17 +1224,22 @@ def leftmost_bit(x): # From X9.62 D.3.2: e3 = 3 * e - negative_self = Point(self.__curve, self.__x, -self.__y, self.__order) + negative_self = Point( + self.__curve, + self.__x, + (-self.__y) % self.__curve.p(), + self.__order, + ) i = leftmost_bit(e3) // 2 result = self - # print_("Multiplying %s by %d (e3 = %d):" % (self, other, e3)) + # print("Multiplying %s by %d (e3 = %d):" % (self, other, e3)) while i > 1: result = result.double() if (e3 & i) != 0 and (e & i) == 0: result = result + self if (e3 & i) == 0 and (e & i) != 0: result = result + negative_self - # print_(". . . i = %d, result = %s" % ( i, result )) + # print(". . . i = %d, result = %s" % ( i, result )) i = i // 2 return result @@ -786,7 +1256,6 @@ def __str__(self): def double(self): """Return a new point that is twice the old.""" - if self == INFINITY: return INFINITY @@ -800,6 +1269,9 @@ def double(self): * numbertheory.inverse_mod(2 * self.__y, p) ) % p + if not l: + return INFINITY + x3 = (l * l - 2 * self.__x) % p y3 = (l * (self.__x - x3) - self.__y) % p @@ -818,5 +1290,320 @@ def order(self): return self.__order +class PointEdwards(AbstractPoint): + """Point on Twisted Edwards curve. + + Internally represents the coordinates on the curve using four parameters, + X, Y, Z, T. They correspond to affine parameters 'x' and 'y' like so: + + x = X / Z + y = Y / Z + x*y = T / Z + """ + + def __init__(self, curve, x, y, z, t, order=None, generator=False): + """ + Initialise a point that uses the extended coordinates internally. + """ + super(PointEdwards, self).__init__() + self.__curve = curve + if GMPY: # pragma: no branch + self.__coords = (mpz(x), mpz(y), mpz(z), mpz(t)) + self.__order = order and mpz(order) + else: # pragma: no branch + self.__coords = (x, y, z, t) + self.__order = order + self.__generator = generator + self.__precompute = [] + + @classmethod + def from_bytes( + cls, + curve, + data, + validate_encoding=None, + valid_encodings=None, + order=None, + generator=False, + ): + """ + Initialise the object from byte encoding of a point. + + `validate_encoding` and `valid_encodings` are provided for + compatibility with Weierstrass curves, they are ignored for Edwards + points. 
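A small sketch of the extended (X, Y, Z, T) representation described in the PointEdwards docstring above, with hypothetical values; T simply carries the product x*y scaled by the same Z:

p = 23
x_aff, y_aff = 3, 10                    # hypothetical affine coordinates
Z = 5                                   # arbitrary non-zero scaling factor
X, Y, T = x_aff * Z % p, y_aff * Z % p, x_aff * y_aff * Z % p
z_inv = pow(Z, -1, p)                   # modular inverse (Python 3.8+)
assert (X * z_inv % p, Y * z_inv % p) == (x_aff, y_aff)
assert T * z_inv % p == x_aff * y_aff % p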
+ + :param data: single point encoding of the public key + :type data: :term:`bytes-like object` + :param curve: the curve on which the public key is expected to lay + :type curve: ecdsa.ellipticcurve.CurveEdTw + :param None validate_encoding: Ignored, encoding is always validated + :param None valid_encodings: Ignored, there is just one encoding + supported + :param int order: the point order, must be non zero when using + generator=True + :param bool generator: Flag to mark the point as a curve generator, + this will cause the library to pre-compute some values to + make repeated usages of the point much faster + + :raises `~ecdsa.errors.MalformedPointError`: if the public point does + not lay on the curve or the encoding is invalid + + :return: Initialised point on an Edwards curve + :rtype: PointEdwards + """ + coord_x, coord_y = super(PointEdwards, cls).from_bytes( + curve, data, validate_encoding, valid_encodings + ) + return PointEdwards( + curve, coord_x, coord_y, 1, coord_x * coord_y, order, generator + ) + + def _maybe_precompute(self): + if not self.__generator or self.__precompute: + return self.__precompute + + # since this code will execute just once, and it's fully deterministic, + # depend on atomicity of the last assignment to switch from empty + # self.__precompute to filled one and just ignore the unlikely + # situation when two threads execute it at the same time (as it won't + # lead to inconsistent __precompute) + order = self.__order + assert order + precompute = [] + i = 1 + order *= 2 + coord_x, coord_y, coord_z, coord_t = self.__coords + prime = self.__curve.p() + + doubler = PointEdwards( + self.__curve, coord_x, coord_y, coord_z, coord_t, order + ) + # for "protection" against Minerva we need 1 or 2 more bits depending + # on order bit size, but it's easier to just calculate one + # point more always + order *= 4 + + while i < order: + doubler = doubler.scale() + coord_x, coord_y = doubler.x(), doubler.y() + coord_t = coord_x * coord_y % prime + precompute.append((coord_x, coord_y, coord_t)) + + i *= 2 + doubler = doubler.double() + + self.__precompute = precompute + return self.__precompute + + def x(self): + """Return affine x coordinate.""" + X1, _, Z1, _ = self.__coords + if Z1 == 1: + return X1 + p = self.__curve.p() + z_inv = numbertheory.inverse_mod(Z1, p) + return X1 * z_inv % p + + def y(self): + """Return affine y coordinate.""" + _, Y1, Z1, _ = self.__coords + if Z1 == 1: + return Y1 + p = self.__curve.p() + z_inv = numbertheory.inverse_mod(Z1, p) + return Y1 * z_inv % p + + def curve(self): + """Return the curve of the point.""" + return self.__curve + + def order(self): + return self.__order + + def scale(self): + """ + Return point scaled so that z == 1. + + Modifies point in place, returns self. + """ + X1, Y1, Z1, _ = self.__coords + if Z1 == 1: + return self + + p = self.__curve.p() + z_inv = numbertheory.inverse_mod(Z1, p) + x = X1 * z_inv % p + y = Y1 * z_inv % p + t = x * y % p + self.__coords = (x, y, 1, t) + return self + + def __eq__(self, other): + """Compare for equality two points with each-other. + + Note: only points on the same curve can be equal. 
+ """ + x1, y1, z1, t1 = self.__coords + if other is INFINITY: + return not x1 or not t1 + if isinstance(other, PointEdwards): + x2, y2, z2, t2 = other.__coords + else: + return NotImplemented + if self.__curve != other.curve(): + return False + p = self.__curve.p() + + # cross multiply to eliminate divisions + xn1 = x1 * z2 % p + xn2 = x2 * z1 % p + yn1 = y1 * z2 % p + yn2 = y2 * z1 % p + return xn1 == xn2 and yn1 == yn2 + + def __ne__(self, other): + """Compare for inequality two points with each-other.""" + return not self == other + + def _add(self, X1, Y1, Z1, T1, X2, Y2, Z2, T2, p, a): + """add two points, assume sane parameters.""" + # after add-2008-hwcd-2 + # from https://hyperelliptic.org/EFD/g1p/auto-twisted-extended.html + # NOTE: there are more efficient formulas for Z1 or Z2 == 1 + A = X1 * X2 % p + B = Y1 * Y2 % p + C = Z1 * T2 % p + D = T1 * Z2 % p + E = D + C + F = ((X1 - Y1) * (X2 + Y2) + B - A) % p + G = B + a * A + H = D - C + if not H: + return self._double(X1, Y1, Z1, T1, p, a) + X3 = E * F % p + Y3 = G * H % p + T3 = E * H % p + Z3 = F * G % p + + return X3, Y3, Z3, T3 + + def __add__(self, other): + """Add point to another.""" + if other == INFINITY: + return self + if ( + not isinstance(other, PointEdwards) + or self.__curve != other.__curve + ): + raise ValueError("The other point is on a different curve.") + + p, a = self.__curve.p(), self.__curve.a() + X1, Y1, Z1, T1 = self.__coords + X2, Y2, Z2, T2 = other.__coords + + X3, Y3, Z3, T3 = self._add(X1, Y1, Z1, T1, X2, Y2, Z2, T2, p, a) + + if not X3 or not T3: + return INFINITY + return PointEdwards(self.__curve, X3, Y3, Z3, T3, self.__order) + + def __radd__(self, other): + """Add other to self.""" + return self + other + + def _double(self, X1, Y1, Z1, T1, p, a): + """Double the point, assume sane parameters.""" + # after "dbl-2008-hwcd" + # from https://hyperelliptic.org/EFD/g1p/auto-twisted-extended.html + # NOTE: there are more efficient formulas for Z1 == 1 + A = X1 * X1 % p + B = Y1 * Y1 % p + C = 2 * Z1 * Z1 % p + D = a * A % p + E = ((X1 + Y1) * (X1 + Y1) - A - B) % p + G = D + B + F = G - C + H = D - B + X3 = E * F % p + Y3 = G * H % p + T3 = E * H % p + Z3 = F * G % p + + return X3, Y3, Z3, T3 + + def double(self): + """Return point added to itself.""" + X1, Y1, Z1, T1 = self.__coords + + if not X1 or not T1: + return INFINITY + + p, a = self.__curve.p(), self.__curve.a() + + X3, Y3, Z3, T3 = self._double(X1, Y1, Z1, T1, p, a) + + # both Ed25519 and Ed448 have prime order, so no point added to + # itself will equal zero + if not X3 or not T3: # pragma: no branch + return INFINITY + return PointEdwards(self.__curve, X3, Y3, Z3, T3, self.__order) + + def __rmul__(self, other): + """Multiply point by an integer.""" + return self * other + + def _mul_precompute(self, other): + """Multiply point by integer with precomputation table.""" + X3, Y3, Z3, T3, p, a = 0, 1, 1, 0, self.__curve.p(), self.__curve.a() + _add = self._add + for X2, Y2, T2 in self.__precompute: + rem = other % 4 + if rem == 0 or rem == 2: + other //= 2 + elif rem == 3: + other = (other + 1) // 2 + X3, Y3, Z3, T3 = _add(X3, Y3, Z3, T3, -X2, Y2, 1, -T2, p, a) + else: + assert rem == 1 + other = (other - 1) // 2 + X3, Y3, Z3, T3 = _add(X3, Y3, Z3, T3, X2, Y2, 1, T2, p, a) + + if not X3 or not T3: + return INFINITY + + return PointEdwards(self.__curve, X3, Y3, Z3, T3, self.__order) + + def __mul__(self, other): + """Multiply point by an integer.""" + X2, Y2, Z2, T2 = self.__coords + if not X2 or not T2 or not other: + return INFINITY + if 
other == 1: + return self + if self.__order: + # order*2 as a "protection" for Minerva + other = other % (self.__order * 2) + if self._maybe_precompute(): + return self._mul_precompute(other) + + X3, Y3, Z3, T3 = 0, 1, 1, 0 # INFINITY in extended coordinates + p, a = self.__curve.p(), self.__curve.a() + _double = self._double + _add = self._add + + for i in reversed(self._naf(other)): + X3, Y3, Z3, T3 = _double(X3, Y3, Z3, T3, p, a) + if i < 0: + X3, Y3, Z3, T3 = _add(X3, Y3, Z3, T3, -X2, Y2, Z2, -T2, p, a) + elif i > 0: + X3, Y3, Z3, T3 = _add(X3, Y3, Z3, T3, X2, Y2, Z2, T2, p, a) + + if not X3 or not T3: + return INFINITY + + return PointEdwards(self.__curve, X3, Y3, Z3, T3, self.__order) + + # This one point is the Point At Infinity for all purposes: INFINITY = Point(None, None, None) diff --git a/src/ecdsa/errors.py b/src/ecdsa/errors.py new file mode 100644 index 00000000..0184c05b --- /dev/null +++ b/src/ecdsa/errors.py @@ -0,0 +1,4 @@ +class MalformedPointError(AssertionError): + """Raised in case the encoding of private or public key is malformed.""" + + pass diff --git a/src/ecdsa/keys.py b/src/ecdsa/keys.py index 7e448aad..f74252c7 100644 --- a/src/ecdsa/keys.py +++ b/src/ecdsa/keys.py @@ -1,83 +1,19 @@ """ Primary classes for performing signing and verification operations. - -.. glossary:: - - raw encoding - Conversion of public, private keys and signatures (which in - mathematical sense are integers or pairs of integers) to strings of - bytes that does not use any special tags or encoding rules. - For any given curve, all keys of the same type or signatures will be - encoded to byte strings of the same length. In more formal sense, - the integers are encoded as big-endian, constant length byte strings, - where the string length is determined by the curve order (e.g. - for NIST256p the order is 256 bits long, so the private key will be 32 - bytes long while public key will be 64 bytes long). The encoding of a - single integer is zero-padded on the left if the numerical value is - low. In case of public keys and signatures, which are comprised of two - integers, the integers are simply concatenated. - - uncompressed - The most common formatting specified in PKIX standards. Specified in - X9.62 and SEC1 standards. The only difference between it and - :term:`raw encoding` is the prepending of a 0x04 byte. Thus an - uncompressed NIST256p public key encoding will be 65 bytes long. - - compressed - The public point representation that uses half of bytes of the - :term:`uncompressed` encoding (rounded up). It uses the first byte of - the encoding to specify the sign of the y coordinate and encodes the - x coordinate as-is. The first byte of the encoding is equal to - 0x02 or 0x03. Compressed encoding of NIST256p public key will be 33 - bytes long. - - hybrid - A combination of :term:`uncompressed` and :term:`compressed` encodings. - Both x and y coordinates are stored just as in :term:`compressed` - encoding, but the first byte reflects the sign of the y coordinate. The - first byte of the encoding will be equal to 0x06 or 0x7. Hybrid - encoding of NIST256p public key will be 65 bytes long. - - PEM - The acronym stands for Privacy Enhanced Email, but currently it is used - primarily as the way to encode :term:`DER` objects into text that can - be either easily copy-pasted or transferred over email. - It uses headers like ``-----BEGIN -----`` and footers - like ``-----END -----`` to separate multiple - types of objects in the same file or the object from the surrounding - comments. 
The actual object stored is base64 encoded. - - DER - Distinguished Encoding Rules, the way to encode :term:`ASN.1` objects - deterministically and uniquely into byte strings. - - ASN.1 - Abstract Syntax Notation 1 is a standard description language for - specifying serialisation and deserialisation of data structures in a - portable and cross-platform way. - - bytes-like object - All the types that implement the buffer protocol. That includes - ``str`` (only on python2), ``bytes``, ``bytesarray``, ``array.array` - and ``memoryview`` of those objects. - Please note that ``array.array` serialisation (converting it to byte - string) is endianess dependant! Signature computed over ``array.array`` - of integers on a big-endian system will not be verified on a - little-endian system and vice-versa. """ import binascii from hashlib import sha1 -from six import PY2, b -from . import ecdsa -from . import der +import os +from six import PY2 +from . import ecdsa, eddsa +from . import der, ssh from . import rfc6979 from . import ellipticcurve -from .curves import NIST192p, find_curve -from .numbertheory import square_root_mod_prime, SquareRootError +from .curves import NIST192p, Curve, Ed25519, Ed448 from .ecdsa import RSZeroError from .util import string_to_number, number_to_string, randrange -from .util import sigencode_string, sigdecode_string +from .util import sigencode_string, sigdecode_string, bit_length from .util import ( oid_ecPublicKey, encoded_oid_ecPublicKey, @@ -86,6 +22,8 @@ MalformedSignature, ) from ._compat import normalise_bytes +from .errors import MalformedPointError +from .ellipticcurve import PointJacobi, CurveEdTw __all__ = [ @@ -118,23 +56,49 @@ class BadDigestError(Exception): pass -class MalformedPointError(AssertionError): - """Raised in case the encoding of private or public key is malformed.""" +def _truncate_and_convert_digest(digest, curve, allow_truncate): + """Truncates and converts digest to an integer.""" + if not allow_truncate: + if len(digest) > curve.baselen: + raise BadDigestError( + "this curve ({0}) is too short " + "for the length of your digest ({1})".format( + curve.name, 8 * len(digest) + ) + ) + else: + digest = digest[: curve.baselen] + number = string_to_number(digest) + if allow_truncate: + max_length = bit_length(curve.order) + # we don't use bit_length(number) as that truncates leading zeros + length = len(digest) * 8 + + # See NIST FIPS 186-4: + # + # When the length of the output of the hash function is greater + # than N (i.e., the bit length of q), then the leftmost N bits of + # the hash function output block shall be used in any calculation + # using the hash function output during the generation or + # verification of a digital signature. + # + # as such, we need to shift-out the low-order bits: + number >>= max(0, length - max_length) - pass + return number class VerifyingKey(object): """ Class for handling keys that can verify signatures (public keys). - :ivar ecdsa.curves.Curve curve: The Curve over which all the cryptographic - operations will take place + :ivar `~ecdsa.curves.Curve` ~.curve: The Curve over which all the + cryptographic operations will take place :ivar default_hashfunc: the function that will be used for hashing the data. 
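A hedged sketch of the truncation rule implemented by _truncate_and_convert_digest() above: when the digest is longer than the curve order, only the leftmost bits are used (per FIPS 186-4), which amounts to a right shift of the integer form:

import hashlib

digest = hashlib.sha512(b"message").digest()
order_bits = 256                        # bit length of the curve order
digest_bits = len(digest) * 8           # 512 for SHA-512
number = int.from_bytes(digest, "big")
number >>= max(0, digest_bits - order_bits)
assert number.bit_length() <= order_bits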
Should implement the same API as hashlib.sha1 :vartype default_hashfunc: callable :ivar pubkey: the actual public key - :vartype pubkey: ecdsa.ecdsa.Public_key + :vartype pubkey: ~ecdsa.ecdsa.Public_key """ def __init__(self, _error__please_use_generate=None): @@ -149,8 +113,12 @@ def __init__(self, _error__please_use_generate=None): def __repr__(self): pub_key = self.to_string("compressed") + if self.default_hashfunc: + hash_name = self.default_hashfunc().name + else: + hash_name = "None" return "VerifyingKey.from_string({0!r}, {1!r}, {2})".format( - pub_key, self.curve, self.default_hashfunc().name + pub_key, self.curve, hash_name ) def __eq__(self, other): @@ -159,6 +127,10 @@ def __eq__(self, other): return self.curve == other.curve and self.pubkey == other.pubkey return NotImplemented + def __ne__(self, other): + """Return False if the points are identical, True otherwise.""" + return not self == other + @classmethod def from_public_point( cls, point, curve=NIST192p, hashfunc=sha1, validate_point=True @@ -169,25 +141,27 @@ def from_public_point( This is a low-level method, generally you will not want to use it. :param point: The point to wrap around, the actual public key - :type point: ecdsa.ellipticcurve.Point + :type point: ~ecdsa.ellipticcurve.AbstractPoint :param curve: The curve on which the point needs to reside, defaults to NIST192p - :type curve: ecdsa.curves.Curve + :type curve: ~ecdsa.curves.Curve :param hashfunc: The default hash function that will be used for verification, needs to implement the same interface - as hashlib.sha1 + as :py:class:`hashlib.sha1` :type hashfunc: callable - :type bool validate_point: whether to check if the point lies on curve + :type bool validate_point: whether to check if the point lays on curve should always be used if the public point is not a result of our own calculation - :raises MalformedPointError: if the public point does not lie on the + :raises MalformedPointError: if the public point does not lay on the curve :return: Initialised VerifyingKey object :rtype: VerifyingKey """ self = cls(_error__please_use_generate=True) + if isinstance(curve.curve, CurveEdTw): + raise ValueError("Method incompatible with Edwards curves") if not isinstance(point, ellipticcurve.PointJacobi): point = ellipticcurve.PointJacobi.from_affine(point) self.curve = curve @@ -197,90 +171,68 @@ def from_public_point( curve.generator, point, validate_point ) except ecdsa.InvalidPointError: - raise MalformedPointError("Point does not lie on the curve") + raise MalformedPointError("Point does not lay on the curve") self.pubkey.order = curve.order return self - def precompute(self): - self.pubkey.point = ellipticcurve.PointJacobi.from_affine( - self.pubkey.point, True - ) - - @staticmethod - def _from_raw_encoding(string, curve): + def precompute(self, lazy=False): """ - Decode public point from :term:`raw encoding`. - - :term:`raw encoding` is the same as the :term:`uncompressed` encoding, - but without the 0x04 byte at the beginning. + Precompute multiplication tables for faster signature verification. + + Calling this method will cause the library to precompute the + scalar multiplication tables, used in signature verification. + While it's an expensive operation (comparable to performing + as many signatures as the bit size of the curve, i.e. 256 for NIST256p) + it speeds up verification 2 times. You should call this method + if you expect to verify hundreds of signatures (or more) using the same + VerifyingKey object. 
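A short usage sketch for precompute(): pay the table-generation cost once, then reuse the key for many verifications (the messages here are hypothetical):

from ecdsa import SigningKey, NIST256p

sk = SigningKey.generate(curve=NIST256p)
vk = sk.get_verifying_key()
vk.precompute()                         # or vk.precompute(lazy=True)
for i in range(100):
    msg = b"message %d" % i
    assert vk.verify(sk.sign(msg), msg)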
+ + Note: You should call this method only once, this method generates a + new precomputation table every time it's called. + + :param bool lazy: whether to calculate the precomputation table now + (if set to False) or if it should be delayed to the time of first + use (when set to True) """ - order = curve.order - # real assert, from_string() should not call us with different length - assert len(string) == curve.verifying_key_length - xs = string[: curve.baselen] - ys = string[curve.baselen :] - if len(xs) != curve.baselen: - raise MalformedPointError("Unexpected length of encoded x") - if len(ys) != curve.baselen: - raise MalformedPointError("Unexpected length of encoded y") - x = string_to_number(xs) - y = string_to_number(ys) - - return ellipticcurve.PointJacobi(curve.curve, x, y, 1, order) - - @staticmethod - def _from_compressed(string, curve): - """Decode public point from compressed encoding.""" - if string[:1] not in (b("\x02"), b("\x03")): - raise MalformedPointError("Malformed compressed point encoding") - - is_even = string[:1] == b("\x02") - x = string_to_number(string[1:]) - order = curve.order - p = curve.curve.p() - alpha = (pow(x, 3, p) + (curve.curve.a() * x) + curve.curve.b()) % p - try: - beta = square_root_mod_prime(alpha, p) - except SquareRootError as e: - raise MalformedPointError( - "Encoding does not correspond to a point on curve", e + if isinstance(self.curve.curve, CurveEdTw): + pt = self.pubkey.point + self.pubkey.point = ellipticcurve.PointEdwards( + pt.curve(), + pt.x(), + pt.y(), + 1, + pt.x() * pt.y(), + self.curve.order, + generator=True, ) - if is_even == bool(beta & 1): - y = p - beta else: - y = beta - return ellipticcurve.PointJacobi(curve.curve, x, y, 1, order) - - @classmethod - def _from_hybrid(cls, string, curve, validate_point): - """Decode public point from hybrid encoding.""" - # real assert, from_string() should not call us with different types - assert string[:1] in (b("\x06"), b("\x07")) - - # primarily use the uncompressed as it's easiest to handle - point = cls._from_raw_encoding(string[1:], curve) - - # but validate if it's self-consistent if we're asked to do that - if validate_point and ( - point.y() & 1 - and string[:1] != b("\x07") - or (not point.y() & 1) - and string[:1] != b("\x06") - ): - raise MalformedPointError("Inconsistent hybrid point encoding") - - return point + self.pubkey.point = ellipticcurve.PointJacobi.from_affine( + self.pubkey.point, True + ) + # as precomputation in now delayed to the time of first use of the + # point and we were asked specifically to precompute now, make + # sure the precomputation is performed now to preserve the behaviour + if not lazy: + self.pubkey.point * 2 @classmethod def from_string( - cls, string, curve=NIST192p, hashfunc=sha1, validate_point=True + cls, + string, + curve=NIST192p, + hashfunc=sha1, + validate_point=True, + valid_encodings=None, ): """ Initialise the object from byte encoding of public key. The method does accept and automatically detect the type of point encoding used. It supports the :term:`raw encoding`, - :term:`uncompressed`, :term:`compressed` and :term:`hybrid` encodings. + :term:`uncompressed`, :term:`compressed`, and :term:`hybrid` encodings. + It also works with the native encoding of Ed25519 and Ed448 public + keys (technically those are compressed, but encoded differently than + in other signature systems). Note, while the method is named "from_string" it's a misnomer from Python 2 days when there were no binary strings. 
In Python 3 the @@ -288,46 +240,54 @@ def from_string( :param string: single point encoding of the public key :type string: :term:`bytes-like object` - :param curve: the curve on which the public key is expected to lie - :type curve: ecdsa.curves.Curve + :param curve: the curve on which the public key is expected to lay + :type curve: ~ecdsa.curves.Curve :param hashfunc: The default hash function that will be used for - verification, needs to implement the same interface as hashlib.sha1 + verification, needs to implement the same interface as + hashlib.sha1. Ignored for EdDSA. :type hashfunc: callable - :param validate_point: whether to verify that the point lies on the - provided curve or not, defaults to True + :param validate_point: whether to verify that the point lays on the + provided curve or not, defaults to True. Ignored for EdDSA. :type validate_point: bool - - :raises MalformedPointError: if the public point does not lie on the + :param valid_encodings: list of acceptable point encoding formats, + supported ones are: :term:`uncompressed`, :term:`compressed`, + :term:`hybrid`, and :term:`raw encoding` (specified with ``raw`` + name). All formats by default (specified with ``None``). + Ignored for EdDSA. + :type valid_encodings: :term:`set-like object` + + :raises MalformedPointError: if the public point does not lay on the curve or the encoding is invalid :return: Initialised VerifyingKey object :rtype: VerifyingKey """ - string = normalise_bytes(string) - sig_len = len(string) - if sig_len == curve.verifying_key_length: - point = cls._from_raw_encoding(string, curve) - elif sig_len == curve.verifying_key_length + 1: - if string[:1] in (b("\x06"), b("\x07")): - point = cls._from_hybrid(string, curve, validate_point) - elif string[:1] == b("\x04"): - point = cls._from_raw_encoding(string[1:], curve) - else: - raise MalformedPointError( - "Invalid X9.62 encoding of the public point" - ) - elif sig_len == curve.baselen + 1: - point = cls._from_compressed(string, curve) - else: - raise MalformedPointError( - "Length of string does not match lengths of " - "any of the supported encodings of {0} " - "curve.".format(curve.name) - ) + if isinstance(curve.curve, CurveEdTw): + self = cls(_error__please_use_generate=True) + self.curve = curve + self.default_hashfunc = None # ignored for EdDSA + try: + self.pubkey = eddsa.PublicKey(curve.generator, string) + except ValueError: + raise MalformedPointError("Malformed point for the curve") + return self + + point = PointJacobi.from_bytes( + curve.curve, + string, + validate_encoding=validate_point, + valid_encodings=valid_encodings, + ) return cls.from_public_point(point, curve, hashfunc, validate_point) @classmethod - def from_pem(cls, string, hashfunc=sha1): + def from_pem( + cls, + string, + hashfunc=sha1, + valid_encodings=None, + valid_curve_encodings=None, + ): """ Initialise from public key stored in :term:`PEM` format. @@ -336,19 +296,40 @@ def from_pem(cls, string, hashfunc=sha1): See the :func:`~VerifyingKey.from_der()` method for details of the format supported. - Note: only a single PEM object encoding is supported in provided + Note: only a single PEM object decoding is supported in provided string. :param string: text with PEM-encoded public ECDSA key :type string: str + :param valid_encodings: list of allowed point encodings. + By default :term:`uncompressed`, :term:`compressed`, and + :term:`hybrid`. To read malformed files, include + :term:`raw encoding` with ``raw`` in the list. 
+ :type valid_encodings: :term:`set-like object` + :param valid_curve_encodings: list of allowed encoding formats + for curve parameters. By default (``None``) all are supported: + ``named_curve`` and ``explicit``. + :type valid_curve_encodings: :term:`set-like object` + :return: Initialised VerifyingKey object :rtype: VerifyingKey """ - return cls.from_der(der.unpem(string), hashfunc=hashfunc) + return cls.from_der( + der.unpem(string), + hashfunc=hashfunc, + valid_encodings=valid_encodings, + valid_curve_encodings=valid_curve_encodings, + ) @classmethod - def from_der(cls, string, hashfunc=sha1): + def from_der( + cls, + string, + hashfunc=sha1, + valid_encodings=None, + valid_curve_encodings=None, + ): """ Initialise the key stored in :term:`DER` format. @@ -373,10 +354,21 @@ def from_der(cls, string, hashfunc=sha1): :param string: binary string with the DER encoding of public ECDSA key :type string: bytes-like object + :param valid_encodings: list of allowed point encodings. + By default :term:`uncompressed`, :term:`compressed`, and + :term:`hybrid`. To read malformed files, include + :term:`raw encoding` with ``raw`` in the list. + :type valid_encodings: :term:`set-like object` + :param valid_curve_encodings: list of allowed encoding formats + for curve parameters. By default (``None``) all are supported: + ``named_curve`` and ``explicit``. + :type valid_curve_encodings: :term:`set-like object` :return: Initialised VerifyingKey object :rtype: VerifyingKey """ + if valid_encodings is None: + valid_encodings = set(["uncompressed", "compressed", "hybrid"]) string = normalise_bytes(string) # [[oid_ecPublicKey,oid_curve], point_str_bitstring] s1, empty = der.remove_sequence(string) @@ -387,18 +379,22 @@ def from_der(cls, string, hashfunc=sha1): s2, point_str_bitstring = der.remove_sequence(s1) # s2 = oid_ecPublicKey,oid_curve oid_pk, rest = der.remove_object(s2) - oid_curve, empty = der.remove_object(rest) - if empty != b"": - raise der.UnexpectedDER( - "trailing junk after DER pubkey objects: %s" - % binascii.hexlify(empty) - ) + if oid_pk in (Ed25519.oid, Ed448.oid): + if oid_pk == Ed25519.oid: + curve = Ed25519 + else: + assert oid_pk == Ed448.oid + curve = Ed448 + point_str, empty = der.remove_bitstring(point_str_bitstring, 0) + if empty: + raise der.UnexpectedDER("trailing junk after public key") + return cls.from_string(point_str, curve, None) if not oid_pk == oid_ecPublicKey: raise der.UnexpectedDER( "Unexpected object identifier in DER " "encoding: {0!r}".format(oid_pk) ) - curve = find_curve(oid_curve) + curve = Curve.from_der(rest, valid_curve_encodings) point_str, empty = der.remove_bitstring(point_str_bitstring, 0) if empty != b"": raise der.UnexpectedDER( @@ -408,11 +404,22 @@ def from_der(cls, string, hashfunc=sha1): # raw encoding of point is invalid in DER files if len(point_str) == curve.verifying_key_length: raise der.UnexpectedDER("Malformed encoding of public point") - return cls.from_string(point_str, curve, hashfunc=hashfunc) + return cls.from_string( + point_str, + curve, + hashfunc=hashfunc, + valid_encodings=valid_encodings, + ) @classmethod def from_public_key_recovery( - cls, signature, data, curve, hashfunc=sha1, sigdecode=sigdecode_string + cls, + signature, + data, + curve, + hashfunc=sha1, + sigdecode=sigdecode_string, + allow_truncate=True, ): """ Return keys that can be used as verifiers of the provided signature. 
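A hedged usage sketch of the recovery API: given a signature and the signed data, one of the recovered keys matches the signer's public key:

from ecdsa import SigningKey, VerifyingKey, NIST256p

sk = SigningKey.generate(curve=NIST256p)
data = b"signed message"
sig = sk.sign(data)
candidates = VerifyingKey.from_public_key_recovery(sig, data, NIST256p)
assert any(
    vk.to_string() == sk.get_verifying_key().to_string()
    for vk in candidates
)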
@@ -425,7 +432,7 @@ def from_public_key_recovery( :param data: the data to be hashed for signature verification :type data: bytes-like object :param curve: the curve over which the signature was performed - :type curve: ecdsa.curves.Curve + :type curve: ~ecdsa.curves.Curve :param hashfunc: The default hash function that will be used for verification, needs to implement the same interface as hashlib.sha1 :type hashfunc: callable @@ -435,15 +442,25 @@ def from_public_key_recovery( a tuple with two integers, "r" as the first one and "s" as the second one. See :func:`ecdsa.util.sigdecode_string` and :func:`ecdsa.util.sigdecode_der` for examples. + :param bool allow_truncate: if True, the provided hashfunc can generate + values larger than the bit size of the order of the curve, the + extra bits (at the end of the digest) will be truncated. :type sigdecode: callable :return: Initialised VerifyingKey objects :rtype: list of VerifyingKey """ + if isinstance(curve.curve, CurveEdTw): + raise ValueError("Method unsupported for Edwards curves") data = normalise_bytes(data) digest = hashfunc(data).digest() return cls.from_public_key_recovery_with_digest( - signature, digest, curve, hashfunc=hashfunc, sigdecode=sigdecode + signature, + digest, + curve, + hashfunc=hashfunc, + sigdecode=sigdecode, + allow_truncate=allow_truncate, ) @classmethod @@ -454,6 +471,7 @@ def from_public_key_recovery_with_digest( curve, hashfunc=sha1, sigdecode=sigdecode_string, + allow_truncate=False, ): """ Return keys that can be used as verifiers of the provided signature. @@ -466,7 +484,7 @@ def from_public_key_recovery_with_digest( :param digest: the hash value of the message signed by the signature :type digest: bytes-like object :param curve: the curve over which the signature was performed - :type curve: ecdsa.curves.Curve + :type curve: ~ecdsa.curves.Curve :param hashfunc: The default hash function that will be used for verification, needs to implement the same interface as hashlib.sha1 :type hashfunc: callable @@ -477,17 +495,24 @@ def from_public_key_recovery_with_digest( second one. See :func:`ecdsa.util.sigdecode_string` and :func:`ecdsa.util.sigdecode_der` for examples. :type sigdecode: callable - + :param bool allow_truncate: if True, the provided hashfunc can generate + values larger than the bit size of the order of the curve (and + the length of provided `digest`), the extra bits (at the end of the + digest) will be truncated. 
:return: Initialised VerifyingKey object :rtype: VerifyingKey """ + if isinstance(curve.curve, CurveEdTw): + raise ValueError("Method unsupported for Edwards curves") generator = curve.generator r, s = sigdecode(signature, generator.order()) sig = ecdsa.Signature(r, s) digest = normalise_bytes(digest) - digest_as_number = string_to_number(digest) + digest_as_number = _truncate_and_convert_digest( + digest, curve, allow_truncate + ) pks = sig.recover_public_keys(digest_as_number, generator) # Transforms the ecdsa.Public_key object into a VerifyingKey @@ -496,30 +521,6 @@ def from_public_key_recovery_with_digest( ] return verifying_keys - def _raw_encode(self): - """Convert the public key to the :term:`raw encoding`.""" - order = self.pubkey.order - x_str = number_to_string(self.pubkey.point.x(), order) - y_str = number_to_string(self.pubkey.point.y(), order) - return x_str + y_str - - def _compressed_encode(self): - """Encode the public point into the compressed form.""" - order = self.pubkey.order - x_str = number_to_string(self.pubkey.point.x(), order) - if self.pubkey.point.y() & 1: - return b("\x03") + x_str - else: - return b("\x02") + x_str - - def _hybrid_encode(self): - """Encode the public point into the hybrid form.""" - raw_enc = self._raw_encode() - if self.pubkey.point.y() & 1: - return b("\x07") + raw_enc - else: - return b("\x06") + raw_enc - def to_string(self, encoding="raw"): """ Convert the public key to a byte string. @@ -541,16 +542,11 @@ def to_string(self, encoding="raw"): :rtype: bytes """ assert encoding in ("raw", "uncompressed", "compressed", "hybrid") - if encoding == "raw": - return self._raw_encode() - elif encoding == "uncompressed": - return b("\x04") + self._raw_encode() - elif encoding == "hybrid": - return self._hybrid_encode() - else: - return self._compressed_encode() + return self.pubkey.point.to_bytes(encoding) - def to_pem(self, point_encoding="uncompressed"): + def to_pem( + self, point_encoding="uncompressed", curve_parameters_encoding=None + ): """ Convert the public key to the :term:`PEM` format. @@ -564,6 +560,9 @@ def to_pem(self, point_encoding="uncompressed"): of public keys. "uncompressed" is most portable, "compressed" is smallest. "hybrid" is uncommon and unsupported by most implementations, it is as big as "uncompressed". + :param str curve_parameters_encoding: the encoding for curve parameters + to use, by default tries to use ``named_curve`` encoding, + if that is not possible, falls back to ``explicit`` encoding. :return: portable encoding of the public key :rtype: bytes @@ -571,9 +570,14 @@ def to_pem(self, point_encoding="uncompressed"): .. warning:: The PEM is encoded to US-ASCII, it needs to be re-encoded if the system is incompatible (e.g. uses UTF-16) """ - return der.topem(self.to_der(point_encoding), "PUBLIC KEY") + return der.topem( + self.to_der(point_encoding, curve_parameters_encoding), + "PUBLIC KEY", + ) - def to_der(self, point_encoding="uncompressed"): + def to_der( + self, point_encoding="uncompressed", curve_parameters_encoding=None + ): """ Convert the public key to the :term:`DER` format. @@ -585,6 +589,9 @@ def to_der(self, point_encoding="uncompressed"): of public keys. "uncompressed" is most portable, "compressed" is smallest. "hybrid" is uncommon and unsupported by most implementations, it is as big as "uncompressed". + :param str curve_parameters_encoding: the encoding for curve parameters + to use, by default tries to use ``named_curve`` encoding, + if that is not possible, falls back to ``explicit`` encoding. 
:return: DER encoding of the public key :rtype: bytes @@ -592,15 +599,33 @@ def to_der(self, point_encoding="uncompressed"): if point_encoding == "raw": raise ValueError("raw point_encoding not allowed in DER") point_str = self.to_string(point_encoding) + if isinstance(self.curve.curve, CurveEdTw): + return der.encode_sequence( + der.encode_sequence(der.encode_oid(*self.curve.oid)), + der.encode_bitstring(bytes(point_str), 0), + ) return der.encode_sequence( der.encode_sequence( - encoded_oid_ecPublicKey, self.curve.encoded_oid + encoded_oid_ecPublicKey, + self.curve.to_der(curve_parameters_encoding, point_encoding), ), # 0 is the number of unused bits in the # bit string der.encode_bitstring(point_str, 0), ) + def to_ssh(self): + """ + Convert the public key to the SSH format. + + :return: SSH encoding of the public key + :rtype: bytes + """ + return ssh.serialize_public( + self.curve.name, + self.to_string(), + ) + def verify( self, signature, @@ -620,10 +645,10 @@ def verify( as the `sigdecode` parameter. :param signature: encoding of the signature - :type signature: sigdecode method dependant + :type signature: sigdecode method dependent :param data: data signed by the `signature`, will be hashed using `hashfunc`, if specified, or default hash function - :type data: bytes like object + :type data: :term:`bytes-like object` :param hashfunc: The default hash function that will be used for verification, needs to implement the same interface as hashlib.sha1 :type hashfunc: callable @@ -648,6 +673,12 @@ def verify( # signature doesn't have to be a bytes-like-object so don't normalise # it, the decoders will do that data = normalise_bytes(data) + if isinstance(self.curve.curve, CurveEdTw): + signature = normalise_bytes(signature) + try: + return self.pubkey.verify(data, signature) + except (ValueError, MalformedPointError) as e: + raise BadSignatureError("Signature verification failed", e) hashfunc = hashfunc or self.default_hashfunc digest = hashfunc(data).digest() @@ -669,9 +700,9 @@ def verify_digest( as the `sigdecode` parameter. :param signature: encoding of the signature - :type signature: sigdecode method dependant + :type signature: sigdecode method dependent :param digest: raw hash value that the signature authenticates. - :type digest: bytes like object + :type digest: :term:`bytes-like object` :param sigdecode: Callable to define the way the signature needs to be decoded to an object, needs to handle `signature` as the first parameter, the curve order (an int) as the second and return @@ -694,14 +725,12 @@ def verify_digest( # signature doesn't have to be a bytes-like-object so don't normalise # it, the decoders will do that digest = normalise_bytes(digest) - if allow_truncate: - digest = digest[: self.curve.baselen] - if len(digest) > self.curve.baselen: - raise BadDigestError( - "this curve (%s) is too short " - "for your digest (%d)" % (self.curve.name, 8 * len(digest)) - ) - number = string_to_number(digest) + number = _truncate_and_convert_digest( + digest, + self.curve, + allow_truncate, + ) + try: r, s = sigdecode(signature, self.pubkey.order) except (der.UnexpectedDER, MalformedSignature) as e: @@ -716,14 +745,14 @@ class SigningKey(object): """ Class for handling keys that can create signatures (private keys). 
- :ivar ecdsa.curves.Curve curve: The Curve over which all the cryptographic - operations will take place + :ivar `~ecdsa.curves.Curve` curve: The Curve over which all the + cryptographic operations will take place :ivar default_hashfunc: the function that will be used for hashing the - data. Should implement the same API as hashlib.sha1 + data. Should implement the same API as :py:class:`hashlib.sha1` :ivar int baselen: the length of a :term:`raw encoding` of private key - :ivar ecdsa.keys.VerifyingKey verifying_key: the public key + :ivar `~ecdsa.keys.VerifyingKey` verifying_key: the public key associated with this private key - :ivar ecdsa.ecdsa.Private_key privkey: the actual private key + :ivar `~ecdsa.ecdsa.Private_key` privkey: the actual private key """ def __init__(self, _error__please_use_generate=None): @@ -746,6 +775,37 @@ def __eq__(self, other): ) return NotImplemented + def __ne__(self, other): + """Return False if the points are identical, True otherwise.""" + return not self == other + + @classmethod + def _twisted_edwards_keygen(cls, curve, entropy): + """Generate a private key on a Twisted Edwards curve.""" + if not entropy: + entropy = os.urandom + random = entropy(curve.baselen) + private_key = eddsa.PrivateKey(curve.generator, random) + public_key = private_key.public_key() + + verifying_key = VerifyingKey.from_string( + public_key.public_key(), curve + ) + + self = cls(_error__please_use_generate=True) + self.curve = curve + self.default_hashfunc = None + self.baselen = curve.baselen + self.privkey = private_key + self.verifying_key = verifying_key + return self + + @classmethod + def _weierstrass_keygen(cls, curve, entropy, hashfunc): + """Generate a private key on a Weierstrass curve.""" + secexp = randrange(curve.order, entropy) + return cls.from_secret_exponent(secexp, curve, hashfunc) + @classmethod def generate(cls, curve=NIST192p, entropy=None, hashfunc=sha1): """ @@ -753,7 +813,7 @@ def generate(cls, curve=NIST192p, entropy=None, hashfunc=sha1): :param curve: The curve on which the point needs to reside, defaults to NIST192p - :type curve: ecdsa.curves.Curve + :type curve: ~ecdsa.curves.Curve :param entropy: Source of randomness for generating the private keys, should provide cryptographically secure random numbers if the keys need to be secure. Uses os.urandom() by default. @@ -766,8 +826,9 @@ def generate(cls, curve=NIST192p, entropy=None, hashfunc=sha1): :return: Initialised SigningKey object :rtype: SigningKey """ - secexp = randrange(curve.order, entropy) - return cls.from_secret_exponent(secexp, curve, hashfunc) + if isinstance(curve.curve, CurveEdTw): + return cls._twisted_edwards_keygen(curve, entropy) + return cls._weierstrass_keygen(curve, entropy, hashfunc) @classmethod def from_secret_exponent(cls, secexp, curve=NIST192p, hashfunc=sha1): @@ -780,7 +841,7 @@ def from_secret_exponent(cls, secexp, curve=NIST192p, hashfunc=sha1): :param int secexp: secret multiplier (the actual private key in ECDSA). Needs to be an integer between 1 and the curve order. 
:param curve: The curve on which the point needs to reside - :type curve: ecdsa.curves.Curve + :type curve: ~ecdsa.curves.Curve :param hashfunc: The default hash function that will be used for signing, needs to implement the same interface as hashlib.sha1 @@ -794,6 +855,11 @@ def from_secret_exponent(cls, secexp, curve=NIST192p, hashfunc=sha1): :return: Initialised SigningKey object :rtype: SigningKey """ + if isinstance(curve.curve, CurveEdTw): + raise ValueError( + "Edwards keys don't support setting the secret scalar " + "(exponent) directly" + ) self = cls(_error__please_use_generate=True) self.curve = curve self.default_hashfunc = hashfunc @@ -825,9 +891,9 @@ def from_string(cls, string, curve=NIST192p, hashfunc=sha1): In Python 3, the expected type is `bytes`. :param string: the raw encoding of the private key - :type string: bytes like object + :type string: :term:`bytes-like object` :param curve: The curve on which the point needs to reside - :type curve: ecdsa.curves.Curve + :type curve: ~ecdsa.curves.Curve :param hashfunc: The default hash function that will be used for signing, needs to implement the same interface as hashlib.sha1 @@ -842,16 +908,27 @@ def from_string(cls, string, curve=NIST192p, hashfunc=sha1): :rtype: SigningKey """ string = normalise_bytes(string) + if len(string) != curve.baselen: raise MalformedPointError( "Invalid length of private key, received {0}, " "expected {1}".format(len(string), curve.baselen) ) + if isinstance(curve.curve, CurveEdTw): + self = cls(_error__please_use_generate=True) + self.curve = curve + self.default_hashfunc = None # Ignored for EdDSA + self.baselen = curve.baselen + self.privkey = eddsa.PrivateKey(curve.generator, string) + self.verifying_key = VerifyingKey.from_string( + self.privkey.public_key().public_key(), curve + ) + return self secexp = string_to_number(string) return cls.from_secret_exponent(secexp, curve, hashfunc) @classmethod - def from_pem(cls, string, hashfunc=sha1): + def from_pem(cls, string, hashfunc=sha1, valid_curve_encodings=None): """ Initialise from key stored in :term:`PEM` format. @@ -871,6 +948,11 @@ def from_pem(cls, string, hashfunc=sha1): :param string: text with PEM-encoded private ECDSA key :type string: str + :param valid_curve_encodings: list of allowed encoding formats + for curve parameters. By default (``None``) all are supported: + ``named_curve`` and ``explicit``. + :type valid_curve_encodings: :term:`set-like object` + :raises MalformedPointError: if the length of encoding doesn't match the provided curve or the encoded values is too large @@ -881,7 +963,7 @@ def from_pem(cls, string, hashfunc=sha1): :return: Initialised SigningKey object :rtype: SigningKey """ - if not PY2 and isinstance(string, str): + if not PY2 and isinstance(string, str): # pragma: no branch string = string.encode() # The privkey pem may have multiple sections, commonly it also has @@ -891,10 +973,14 @@ def from_pem(cls, string, hashfunc=sha1): if private_key_index == -1: private_key_index = string.index(b"-----BEGIN PRIVATE KEY-----") - return cls.from_der(der.unpem(string[private_key_index:]), hashfunc) + return cls.from_der( + der.unpem(string[private_key_index:]), + hashfunc, + valid_curve_encodings, + ) @classmethod - def from_der(cls, string, hashfunc=sha1): + def from_der(cls, string, hashfunc=sha1, valid_curve_encodings=None): """ Initialise from key stored in :term:`DER` format. 
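The ``SigningKey`` changes above add EdDSA support (generation dispatch for Twisted Edwards curves, raw-encoding loading, and the ``valid_curve_encodings`` knob on the PEM/DER readers). A brief usage sketch, again illustrative only and not part of the patch; it assumes ``Ed25519`` is exposed by ``ecdsa.curves`` as the code above references it::

    from ecdsa import SigningKey
    from ecdsa.curves import Ed25519

    # Edwards-curve keys are generated or loaded from raw/PKCS#8 form;
    # from_secret_exponent() raises ValueError for them (see above).
    sk = SigningKey.generate(curve=Ed25519)

    # EdDSA private keys serialize to PKCS#8 only; "ssleay" raises ValueError.
    pem = sk.to_pem(format="pkcs8")
    sk2 = SigningKey.from_pem(pem)
    assert sk.to_string() == sk2.to_string()

    # Signing dispatches to the deterministic EdDSA algorithm.
    sig = sk.sign(b"message")
    assert sk.get_verifying_key().verify(sig, b"message")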
@@ -915,14 +1001,14 @@ def from_der(cls, string, hashfunc=sha1): `publicKey` field is ignored completely (errors, if any, in it will be undetected). - The only format supported for the `parameters` field is the named - curve method. Explicit encoding of curve parameters is not supported. + Two formats are supported for the `parameters` field: the named + curve and the explicit encoding of curve parameters. In the legacy ssleay format, this implementation requires the optional `parameters` field to get the curve name. In PKCS #8 format, the curve is part of the PrivateKeyAlgorithmIdentifier. The PKCS #8 format includes an ECPrivateKey object as the `privateKey` - field within a larger structure: + field within a larger structure:: OneAsymmetricKey ::= SEQUENCE { version Version, @@ -938,7 +1024,12 @@ def from_der(cls, string, hashfunc=sha1): in them will not be detected. :param string: binary string with DER-encoded private ECDSA key - :type string: bytes like object + :type string: :term:`bytes-like object` + :param valid_curve_encodings: list of allowed encoding formats + for curve parameters. By default (``None``) all are supported: + ``named_curve`` and ``explicit``. + Ignored for EdDSA. + :type valid_curve_encodings: :term:`set-like object` :raises MalformedPointError: if the length of encoding doesn't match the provided curve or the encoded values is too large @@ -953,7 +1044,7 @@ def from_der(cls, string, hashfunc=sha1): curve = None s, empty = der.remove_sequence(s) - if empty != b(""): + if empty != b"": raise der.UnexpectedDER( "trailing junk after DER privkey: %s" % binascii.hexlify(empty) ) @@ -973,18 +1064,43 @@ def from_der(cls, string, hashfunc=sha1): sequence, s = der.remove_sequence(s) algorithm_oid, algorithm_identifier = der.remove_object(sequence) - curve_oid, empty = der.remove_object(algorithm_identifier) - curve = find_curve(curve_oid) + + if algorithm_oid in (Ed25519.oid, Ed448.oid): + if algorithm_identifier: + raise der.UnexpectedDER( + "Non NULL parameters for a EdDSA key" + ) + key_str_der, s = der.remove_octet_string(s) + + # As RFC 5958 describes, there may be optional Attributes + # and PublicKey fields. Don't raise an error if data follows + # the PrivateKey + + # TODO parse attributes or validate publickey + # if s: + # raise der.UnexpectedDER( + # "trailing junk inside the privateKey" + # ) + key_str, s = der.remove_octet_string(key_str_der) + if s: + raise der.UnexpectedDER( + "trailing junk after the encoded private key" + ) + + if algorithm_oid == Ed25519.oid: + curve = Ed25519 + else: + assert algorithm_oid == Ed448.oid + curve = Ed448 + + return cls.from_string(key_str, curve, None) if algorithm_oid not in (oid_ecPublicKey, oid_ecDH, oid_ecMQV): raise der.UnexpectedDER( "unexpected algorithm identifier '%s'" % (algorithm_oid,) ) - if empty != b"": - raise der.UnexpectedDER( - "unexpected data after algorithm identifier: %s" - % binascii.hexlify(empty) - ) + + curve = Curve.from_der(algorithm_identifier, valid_curve_encodings) # Up next is an octet string containing an ECPrivateKey. Ignore # the optional "attributes" and "publicKey" fields that come after. @@ -993,7 +1109,7 @@ def from_der(cls, string, hashfunc=sha1): # Unpack the ECPrivateKey to get to the key data octet string, # and rejoin the ssleay parsing path.
s, empty = der.remove_sequence(s) - if empty != b(""): + if empty != b"": raise der.UnexpectedDER( "trailing junk after DER privkey: %s" % binascii.hexlify(empty) @@ -1016,13 +1132,7 @@ def from_der(cls, string, hashfunc=sha1): raise der.UnexpectedDER( "expected tag 0 in DER privkey, got %d" % tag ) - curve_oid, empty = der.remove_object(curve_oid_str) - if empty != b(""): - raise der.UnexpectedDER( - "trailing junk after DER privkey " - "curve_oid: %s" % binascii.hexlify(empty) - ) - curve = find_curve(curve_oid) + curve = Curve.from_der(curve_oid_str, valid_curve_encodings) # we don't actually care about the following fields # @@ -1039,7 +1149,7 @@ def from_der(cls, string, hashfunc=sha1): # our from_string method likes fixed-length privkey strings if len(privkey_str) < curve.baselen: privkey_str = ( - b("\x00") * (curve.baselen - len(privkey_str)) + privkey_str + b"\x00" * (curve.baselen - len(privkey_str)) + privkey_str ) return cls.from_string(privkey_str, curve, hashfunc) @@ -1054,11 +1164,18 @@ def to_string(self): :return: raw encoding of private key :rtype: bytes """ + if isinstance(self.curve.curve, CurveEdTw): + return bytes(self.privkey.private_key) secexp = self.privkey.secret_multiplier s = number_to_string(secexp, self.privkey.order) return s - def to_pem(self, point_encoding="uncompressed", format="ssleay"): + def to_pem( + self, + point_encoding="uncompressed", + format="ssleay", + curve_parameters_encoding=None, + ): """ Convert the private key to the :term:`PEM` format. @@ -1072,6 +1189,11 @@ def to_pem(self, point_encoding="uncompressed", format="ssleay"): :param str point_encoding: format to use for encoding public point :param str format: either ``ssleay`` (default) or ``pkcs8`` + :param str curve_parameters_encoding: format of encoded curve + parameters, default depends on the curve, if the curve has + an associated OID, ``named_curve`` format will be used, + if no OID is associated with the curve, the fallback of + ``explicit`` parameters will be used. :return: PEM encoded private key :rtype: bytes @@ -1082,9 +1204,26 @@ def to_pem(self, point_encoding="uncompressed", format="ssleay"): # TODO: "BEGIN ECPARAMETERS" assert format in ("ssleay", "pkcs8") header = "EC PRIVATE KEY" if format == "ssleay" else "PRIVATE KEY" - return der.topem(self.to_der(point_encoding, format), header) + return der.topem( + self.to_der(point_encoding, format, curve_parameters_encoding), + header, + ) + + def _encode_eddsa(self): + """Create a PKCS#8 encoding of EdDSA keys.""" + ec_private_key = der.encode_octet_string(self.to_string()) + return der.encode_sequence( + der.encode_integer(0), + der.encode_sequence(der.encode_oid(*self.curve.oid)), + der.encode_octet_string(ec_private_key), + ) - def to_der(self, point_encoding="uncompressed", format="ssleay"): + def to_der( + self, + point_encoding="uncompressed", + format="ssleay", + curve_parameters_encoding=None, + ): """ Convert the private key to the :term:`DER` format. @@ -1094,7 +1233,15 @@ def to_der(self, point_encoding="uncompressed", format="ssleay"): The public key will be included in the generated string. :param str point_encoding: format to use for encoding public point - :param str format: either ``ssleay`` (default) or ``pkcs8`` + Ignored for EdDSA + :param str format: either ``ssleay`` (default) or ``pkcs8``. + EdDSA keys require ``pkcs8``. 
+ :param str curve_parameters_encoding: format of encoded curve + parameters, default depends on the curve, if the curve has + an associated OID, ``named_curve`` format will be used, + if no OID is associated with the curve, the fallback of + ``explicit`` parameters will be used. + Ignored for EdDSA. :return: DER encoded private key :rtype: bytes @@ -1104,15 +1251,27 @@ def to_der(self, point_encoding="uncompressed", format="ssleay"): if point_encoding == "raw": raise ValueError("raw encoding not allowed in DER") assert format in ("ssleay", "pkcs8") + if isinstance(self.curve.curve, CurveEdTw): + if format != "pkcs8": + raise ValueError("Only PKCS#8 format supported for EdDSA keys") + return self._encode_eddsa() encoded_vk = self.get_verifying_key().to_string(point_encoding) - # the 0 in encode_bitstring specifies the number of unused bits - # in the `encoded_vk` string - ec_private_key = der.encode_sequence( + priv_key_elems = [ der.encode_integer(1), der.encode_octet_string(self.to_string()), - der.encode_constructed(0, self.curve.encoded_oid), - der.encode_constructed(1, der.encode_bitstring(encoded_vk, 0)), + ] + if format == "ssleay": + priv_key_elems.append( + der.encode_constructed( + 0, self.curve.to_der(curve_parameters_encoding) + ) + ) + # the 0 in encode_bitstring specifies the number of unused bits + # in the `encoded_vk` string + priv_key_elems.append( + der.encode_constructed(1, der.encode_bitstring(encoded_vk, 0)) ) + ec_private_key = der.encode_sequence(*priv_key_elems) if format == "ssleay": return ec_private_key @@ -1122,11 +1281,25 @@ def to_der(self, point_encoding="uncompressed", format="ssleay"): # top-level structure. der.encode_integer(1), der.encode_sequence( - der.encode_oid(*oid_ecPublicKey), self.curve.encoded_oid + der.encode_oid(*oid_ecPublicKey), + self.curve.to_der(curve_parameters_encoding), ), der.encode_octet_string(ec_private_key), ) + def to_ssh(self): + """ + Convert the private key to the SSH format. + + :return: SSH encoded private key + :rtype: bytes + """ + return ssh.serialize_private( + self.curve.name, + self.verifying_key.to_string(), + self.to_string(), + ) + def get_verifying_key(self): """ Return the VerifyingKey associated with this private key. @@ -1147,20 +1320,27 @@ def sign_deterministic( extra_entropy=b"", ): """ - Create signature over data using the deterministic RFC6679 algorithm. + Create signature over data. + + For Weierstrass curves it uses the deterministic RFC6979 algorithm. + For Edwards curves it uses the standard EdDSA algorithm. - The data will be hashed using the `hashfunc` function before signing. + For ECDSA the data will be hashed using the `hashfunc` function before + signing. + For EdDSA the data will be hashed with the hash associated with the + curve (SHA-512 for Ed25519 and SHAKE-256 for Ed448). This is the recommended method for performing signatures when hashing of data is necessary. :param data: data to be hashed and computed signature over - :type data: bytes like object + :type data: :term:`bytes-like object` :param hashfunc: hash function to use for computing the signature, if unspecified, the default hash function selected during object initialisation will be used (see `VerifyingKey.default_hashfunc`). The object needs to implement the same interface as hashlib.sha1. + Ignored with EdDSA. :type hashfunc: callable :param sigencode: function used to encode the signature. The function needs to accept three parameters: the two integers @@ -1168,16 +1348,22 @@ def sign_deterministic( signature was computed. 
It needs to return an encoded signature. See `ecdsa.util.sigencode_string` and `ecdsa.util.sigencode_der` as examples of such functions. + Ignored with EdDSA. :type sigencode: callable :param extra_entropy: additional data that will be fed into the random number generator used in the RFC6979 process. Entirely optional. - :type extra_entropy: bytes like object + Ignored with EdDSA. + :type extra_entropy: :term:`bytes-like object` :return: encoded signature over `data` - :rtype: bytes or sigencode function dependant type + :rtype: bytes or sigencode function dependent type """ hashfunc = hashfunc or self.default_hashfunc data = normalise_bytes(data) + + if isinstance(self.curve.curve, CurveEdTw): + return self.privkey.sign(data) + extra_entropy = normalise_bytes(extra_entropy) digest = hashfunc(data).digest() @@ -1198,7 +1384,7 @@ def sign_digest_deterministic( allow_truncate=False, ): """ - Create signature for digest using the deterministic RFC6679 algorithm. + Create signature for digest using the deterministic RFC6979 algorithm. `digest` should be the output of cryptographically secure hash function like SHA256 or SHA-3-256. @@ -1207,32 +1393,36 @@ def sign_digest_deterministic( hashing of data is necessary. :param digest: hash of data that will be signed - :type digest: bytes like object + :type digest: :term:`bytes-like object` :param hashfunc: hash function to use for computing the random "k" value from RFC6979 process, if unspecified, the default hash function selected during object initialisation will be used (see - `VerifyingKey.default_hashfunc`). The object needs to implement - the same interface as hashlib.sha1. + :attr:`.VerifyingKey.default_hashfunc`). The object needs to + implement + the same interface as :func:`~hashlib.sha1` from :py:mod:`hashlib`. :type hashfunc: callable :param sigencode: function used to encode the signature. The function needs to accept three parameters: the two integers that are the signature and the order of the curve over which the signature was computed. It needs to return an encoded signature. - See `ecdsa.util.sigencode_string` and `ecdsa.util.sigencode_der` + See :func:`~ecdsa.util.sigencode_string` and + :func:`~ecdsa.util.sigencode_der` as examples of such functions. :type sigencode: callable :param extra_entropy: additional data that will be fed into the random number generator used in the RFC6979 process. Entirely optional. - :type extra_entropy: bytes like object + :type extra_entropy: :term:`bytes-like object` :param bool allow_truncate: if True, the provided digest can have bigger bit-size than the order of the curve, the extra bits (at the end of the digest) will be truncated. Use it when signing SHA-384 output using NIST256p or in similar situations. :return: encoded signature for the `digest` hash - :rtype: bytes or sigencode function dependant type + :rtype: bytes or sigencode function dependent type """ + if isinstance(self.curve.curve, CurveEdTw): + raise ValueError("Method unsupported for Edwards curves") secexp = self.privkey.secret_multiplier hashfunc = hashfunc or self.default_hashfunc digest = normalise_bytes(digest) @@ -1274,7 +1464,11 @@ def sign( allow_truncate=True, ): """ - Create signature over data using the probabilistic ECDSA algorithm. + Create signature over data. + + Uses the probabilistic ECDSA algorithm for Weierstrass curves + (NIST256p, etc.) and the deterministic EdDSA algorithm for the + Edwards curves (Ed25519, Ed448). 
This method uses the standard ECDSA algorithm that requires a cryptographically secure random number generator. @@ -1283,46 +1477,62 @@ def sign( method instead of this one. :param data: data that will be hashed for signing - :type data: bytes like object - :param callable entropy: randomness source, os.urandom by default - :param hashfunc: hash function to use for hashing the provided `data`. + :type data: :term:`bytes-like object` + :param callable entropy: randomness source, :func:`os.urandom` by + default. Ignored with EdDSA. + :param hashfunc: hash function to use for hashing the provided + ``data``. If unspecified the default hash function selected during object initialisation will be used (see - `VerifyingKey.default_hashfunc`). - Should behave like hashlib.sha1. The output length of the + :attr:`.VerifyingKey.default_hashfunc`). + Should behave like :func:`~hashlib.sha1` from :py:mod:`hashlib`. + The output length of the hash (in bytes) must not be longer than the length of the curve order (rounded up to the nearest byte), so using SHA256 with NIST256p is ok, but SHA256 with NIST192p is not. (In the 2**-96ish unlikely event of a hash output larger than the curve order, the hash will effectively be wrapped mod n). - Use hashfunc=hashlib.sha1 to match openssl's -ecdsa-with-SHA1 mode, - or hashfunc=hashlib.sha256 for openssl-1.0.0's -ecdsa-with-SHA256. + If you want to explicitly allow use of large hashes with small + curves set the ``allow_truncate`` to ``True``. + Use ``hashfunc=hashlib.sha1`` to match openssl's + ``-ecdsa-with-SHA1`` mode, + or ``hashfunc=hashlib.sha256`` for openssl-1.0.0's + ``-ecdsa-with-SHA256``. + Ignored for EdDSA :type hashfunc: callable :param sigencode: function used to encode the signature. The function needs to accept three parameters: the two integers that are the signature and the order of the curve over which the signature was computed. It needs to return an encoded signature. - See `ecdsa.util.sigencode_string` and `ecdsa.util.sigencode_der` + See :func:`~ecdsa.util.sigencode_string` and + :func:`~ecdsa.util.sigencode_der` as examples of such functions. + Ignored for EdDSA :type sigencode: callable :param int k: a pre-selected nonce for calculating the signature. In typical use cases, it should be set to None (the default) to allow its generation from an entropy source. - :param bool allow_truncate: if True, the provided digest can have + Ignored for EdDSA. + :param bool allow_truncate: if ``True``, the provided digest can have bigger bit-size than the order of the curve, the extra bits (at the end of the digest) will be truncated. Use it when signing SHA-384 output using NIST256p or in similar situations. True by default. + Ignored for EdDSA. - :raises RSZeroError: in the unlikely event when "r" parameter or - "s" parameter is equal 0 as that would leak the key. Calee should - try a better entropy source or different 'k' in such case. + :raises RSZeroError: in the unlikely event when *r* parameter or + *s* parameter of the created signature is equal 0, as that would + leak the key. Caller should try a better entropy source, retry with + different ``k``, or use the + :func:`~SigningKey.sign_deterministic` in such case. 
:return: encoded signature of the hash of `data` - :rtype: bytes or sigencode function dependant type + :rtype: bytes or sigencode function dependent type """ hashfunc = hashfunc or self.default_hashfunc data = normalise_bytes(data) + if isinstance(self.curve.curve, CurveEdTw): + return self.sign_deterministic(data) h = hashfunc(data).digest() return self.sign_digest(h, entropy, sigencode, k, allow_truncate) @@ -1347,7 +1557,7 @@ def sign_digest( instead of this one. :param digest: hash value that will be signed - :type digest: bytes like object + :type digest: :term:`bytes-like object` :param callable entropy: randomness source, os.urandom by default :param sigencode: function used to encode the signature. The function needs to accept three parameters: the two integers @@ -1365,21 +1575,22 @@ def sign_digest( SHA-384 output using NIST256p or in similar situations. :raises RSZeroError: in the unlikely event when "r" parameter or - "s" parameter is equal 0 as that would leak the key. Calee should - try a better entropy source in such case. + "s" parameter of the created signature is equal 0, as that would + leak the key. Caller should try a better entropy source, retry with + different 'k', or use the + :func:`~SigningKey.sign_digest_deterministic` in such case. :return: encoded signature for the `digest` hash - :rtype: bytes or sigencode function dependant type + :rtype: bytes or sigencode function dependent type """ + if isinstance(self.curve.curve, CurveEdTw): + raise ValueError("Method unsupported for Edwards curves") digest = normalise_bytes(digest) - if allow_truncate: - digest = digest[: self.curve.baselen] - if len(digest) > self.curve.baselen: - raise BadDigestError( - "this curve (%s) is too short " - "for your digest (%d)" % (self.curve.name, 8 * len(digest)) - ) - number = string_to_number(digest) + number = _truncate_and_convert_digest( + digest, + self.curve, + allow_truncate, + ) r, s = self.sign_number(number, entropy, k) return sigencode(r, s, self.privkey.order) @@ -1398,12 +1609,16 @@ def sign_number(self, number, entropy=None, k=None): it will be selected at random using the entropy source. :raises RSZeroError: in the unlikely event when "r" parameter or - "s" parameter is equal 0 as that would leak the key. Calee should - try a different 'k' in such case. + "s" parameter of the created signature is equal 0, as that would + leak the key. Caller should try a better entropy source, retry with + different 'k', or use the + :func:`~SigningKey.sign_digest_deterministic` in such case. :return: the "r" and "s" parameters of the signature :rtype: tuple of ints """ + if isinstance(self.curve.curve, CurveEdTw): + raise ValueError("Method unsupported for Edwards curves") order = self.privkey.order if k is not None: diff --git a/src/ecdsa/numbertheory.py b/src/ecdsa/numbertheory.py index e5cc888d..fe974f8e 100644 --- a/src/ecdsa/numbertheory.py +++ b/src/ecdsa/numbertheory.py @@ -7,10 +7,11 @@ # Written in 2005 and 2006 by Peter Pearson and placed in the public domain. # Revision history: # 2008.11.14: Use pow(base, exponent, modulus) for modular_exp. -# Make gcd and lcm accept arbitrarly many arguments. +# Make gcd and lcm accept arbitrarily many arguments. 
from __future__ import division +import sys from six import integer_types, PY2 from six.moves import reduce @@ -19,11 +20,11 @@ except NameError: xrange = range try: - from gmpy2 import powmod + from gmpy2 import powmod, mpz GMPY2 = True GMPY = False -except ImportError: +except ImportError: # pragma: no branch GMPY2 = False try: from gmpy import mpz @@ -32,8 +33,15 @@ except ImportError: GMPY = False + +if GMPY2 or GMPY: # pragma: no branch + integer_types = tuple(integer_types + (type(mpz(1)),)) + + import math import warnings +import random +from .util import bit_length class Error(Exception): @@ -42,6 +50,10 @@ class Error(Exception): pass +class JacobiError(Error): + pass + + class SquareRootError(Error): pass @@ -153,8 +165,10 @@ def jacobi(a, n): # table printed in HAC, and by extensive use in calculating # modular square roots. - assert n >= 3 - assert n % 2 == 1 + if not n >= 3: + raise JacobiError("n must be larger than 2") + if not n % 2 == 1: + raise JacobiError("n must be odd") a = a % n if a == 0: return 0 @@ -201,66 +215,76 @@ def square_root_mod_prime(a, p): d = pow(a, (p - 1) // 4, p) if d == 1: return pow(a, (p + 3) // 8, p) - if d == p - 1: - return (2 * a * pow(4 * a, (p - 5) // 8, p)) % p - raise RuntimeError("Shouldn't get here.") + assert d == p - 1 + return (2 * a * pow(4 * a, (p - 5) // 8, p)) % p if PY2: # xrange on python2 can take integers representable as C long only range_top = min(0x7FFFFFFF, p) else: range_top = p - for b in xrange(2, range_top): + for b in xrange(2, range_top): # pragma: no branch if jacobi(b * b - 4 * a, p) == -1: f = (a, -b, 1) ff = polynomial_exp_mod((0, 1), (p + 1) // 2, f, p) - assert ff[1] == 0 + if ff[1]: + raise SquareRootError("p is not prime") return ff[0] - raise RuntimeError("No b found.") + # just an assertion + raise RuntimeError("No b found.") # pragma: no cover -if GMPY2: +# because all the inverse_mod code is arch/environment specific, and coveralls +# expects it to execute equal number of times, we need to waive it by +# adding the "no branch" pragma to all branches +if GMPY2: # pragma: no branch def inverse_mod(a, m): """Inverse of a mod m.""" - if a == 0: + if a == 0: # pragma: no branch return 0 return powmod(a, -1, m) - -elif GMPY: +elif GMPY: # pragma: no branch def inverse_mod(a, m): """Inverse of a mod m.""" - # while libgmp likely does support inverses modulo, it is accessible - # only using the native `pow()` function, and `pow()` sanity checks - # the parameters before passing them on to underlying implementation - # on Python2 - if a == 0: + # while libgmp does support inverses modulo, it is accessible + # only using the native `pow()` function, and `pow()` in gmpy sanity + # checks the parameters before passing them on to underlying + # implementation + if a == 0: # pragma: no branch return 0 a = mpz(a) m = mpz(m) lm, hm = mpz(1), mpz(0) low, high = a % m, m - while low > 1: + while low > 1: # pragma: no branch r = high // low lm, low, hm, high = hm - lm * r, high - low * r, lm, low return lm % m +elif sys.version_info >= (3, 8): # pragma: no branch + + def inverse_mod(a, m): + """Inverse of a mod m.""" + if a == 0: # pragma: no branch + return 0 + return pow(a, -1, m) -else: +else: # pragma: no branch def inverse_mod(a, m): """Inverse of a mod m.""" - if a == 0: + if a == 0: # pragma: no branch return 0 lm, hm = 1, 0 low, high = a % m, m - while low > 1: + while low > 1: # pragma: no branch r = high // low lm, low, hm, high = hm - lm * r, high - low * r, lm, low @@ -321,7 +345,6 @@ def factorization(n): return 
[] result = [] - d = 2 # Test the small primes: @@ -331,7 +354,7 @@ def factorization(n): q, r = divmod(n, d) if r == 0: count = 1 - while d <= n: + while d <= n: # pragma: no branch n = q q, r = divmod(n, d) if r != 0: @@ -355,7 +378,8 @@ def factorization(n): if r == 0: # d divides n. How many times? count = 1 n = q - while d <= n: # As long as d might still divide n, + # As long as d might still divide n, + while d <= n: # pragma: no branch q, r = divmod(n, d) # see if it does. if r != 0: break @@ -374,7 +398,7 @@ def phi(n): # pragma: no cover warnings.warn( "Function is unused by library code. If you use this code, " "please open an issue in " - "https://github.com/warner/python-ecdsa", + "https://github.com/tlsfuzzer/python-ecdsa", DeprecationWarning, ) @@ -404,7 +428,7 @@ def carmichael(n): # pragma: no cover warnings.warn( "Function is unused by library code. If you use this code, " "please open an issue in " - "https://github.com/warner/python-ecdsa", + "https://github.com/tlsfuzzer/python-ecdsa", DeprecationWarning, ) @@ -419,7 +443,7 @@ def carmichael_of_factorized(f_list): # pragma: no cover warnings.warn( "Function is unused by library code. If you use this code, " "please open an issue in " - "https://github.com/warner/python-ecdsa", + "https://github.com/tlsfuzzer/python-ecdsa", DeprecationWarning, ) @@ -439,7 +463,7 @@ def carmichael_of_ppower(pp): # pragma: no cover warnings.warn( "Function is unused by library code. If you use this code, " "please open an issue in " - "https://github.com/warner/python-ecdsa", + "https://github.com/tlsfuzzer/python-ecdsa", DeprecationWarning, ) @@ -456,7 +480,7 @@ def order_mod(x, m): # pragma: no cover warnings.warn( "Function is unused by library code. If you use this code, " "please open an issue in " - "https://github.com/warner/python-ecdsa", + "https://github.com/tlsfuzzer/python-ecdsa", DeprecationWarning, ) @@ -482,7 +506,7 @@ def largest_factor_relatively_prime(a, b): # pragma: no cover warnings.warn( "Function is unused by library code. If you use this code, " "please open an issue in " - "https://github.com/warner/python-ecdsa", + "https://github.com/tlsfuzzer/python-ecdsa", DeprecationWarning, ) @@ -507,7 +531,7 @@ def kinda_order_mod(x, m): # pragma: no cover warnings.warn( "Function is unused by library code. If you use this code, " "please open an issue in " - "https://github.com/warner/python-ecdsa", + "https://github.com/tlsfuzzer/python-ecdsa", DeprecationWarning, ) @@ -540,8 +564,8 @@ def is_prime(n): return True else: return False - - if gcd(n, 2 * 3 * 5 * 7 * 11) != 1: + # 2310 = 2 * 3 * 5 * 7 * 11 + if gcd(n, 2310) != 1: return False # Choose a number of iterations sufficient to reduce the @@ -549,7 +573,8 @@ def is_prime(n): # (from Menezes et al. 
Table 4.4): t = 40 - n_bits = 1 + int(math.log(n, 2)) + n_bits = 1 + bit_length(n) + assert 11 <= n_bits <= 16384 for k, tt in ( (100, 27), (150, 18), @@ -576,7 +601,7 @@ def is_prime(n): s = s + 1 r = r // 2 for i in xrange(t): - a = smallprimes[i] + a = random.choice(smallprimes) y = pow(a, r, n) if y != 1 and y != n - 1: j = 1 diff --git a/src/ecdsa/rfc6979.py b/src/ecdsa/rfc6979.py index 1e577c0a..0728b5a4 100644 --- a/src/ecdsa/rfc6979.py +++ b/src/ecdsa/rfc6979.py @@ -42,14 +42,17 @@ def bits2octets(data, order): # https://tools.ietf.org/html/rfc6979#section-3.2 def generate_k(order, secexp, hash_func, data, retry_gen=0, extra_entropy=b""): """ - order - order of the DSA generator used in the signature - secexp - secure exponent (private key) in numeric form - hash_func - reference to the same hash function used for generating - hash - data - hash in binary form of the signing data - retry_gen - int - how many good 'k' values to skip before returning - extra_entropy - extra added data in binary form as per section-3.6 of - rfc6979 + Generate the ``k`` value - the nonce for DSA. + + :param int order: order of the DSA generator used in the signature + :param int secexp: secure exponent (private key) in numeric form + :param hash_func: reference to the same hash function used for generating + hash, like :py:class:`hashlib.sha1` + :param bytes data: hash in binary form of the signing data + :param int retry_gen: how many good 'k' values to skip before returning + :param bytes extra_entropy: additional added data in binary form as per + section-3.6 of rfc6979 + :rtype: int """ qlen = bit_length(order) diff --git a/src/ecdsa/ssh.py b/src/ecdsa/ssh.py new file mode 100644 index 00000000..64e94030 --- /dev/null +++ b/src/ecdsa/ssh.py @@ -0,0 +1,83 @@ +import binascii +from . 
import der +from ._compat import compat26_str, int_to_bytes + +_SSH_ED25519 = b"ssh-ed25519" +_SK_MAGIC = b"openssh-key-v1\0" +_NONE = b"none" + + +def _get_key_type(name): + if name == "Ed25519": + return _SSH_ED25519 + else: + raise ValueError("Unsupported key type") + + +class _Serializer: + def __init__(self): + self.bytes = b"" + + def put_raw(self, val): + self.bytes += val + + def put_u32(self, val): + self.bytes += int_to_bytes(val, length=4, byteorder="big") + + def put_str(self, val): + self.put_u32(len(val)) + self.bytes += val + + def put_pad(self, blklen=8): + padlen = blklen - (len(self.bytes) % blklen) + self.put_raw(bytearray(range(1, 1 + padlen))) + + def encode(self): + return binascii.b2a_base64(compat26_str(self.bytes)) + + def tobytes(self): + return self.bytes + + def topem(self): + return der.topem(self.bytes, "OPENSSH PRIVATE KEY") + + +def serialize_public(name, pub): + serial = _Serializer() + ktype = _get_key_type(name) + serial.put_str(ktype) + serial.put_str(pub) + return b" ".join([ktype, serial.encode()]) + + +def serialize_private(name, pub, priv): + # encode public part + spub = _Serializer() + ktype = _get_key_type(name) + spub.put_str(ktype) + spub.put_str(pub) + + # encode private part + spriv = _Serializer() + checksum = 0 + spriv.put_u32(checksum) + spriv.put_u32(checksum) + spriv.put_raw(spub.tobytes()) + spriv.put_str(priv + pub) + comment = b"" + spriv.put_str(comment) + spriv.put_pad() + + # top-level structure + main = _Serializer() + main.put_raw(_SK_MAGIC) + ciphername = kdfname = _NONE + main.put_str(ciphername) + main.put_str(kdfname) + nokdf = 0 + main.put_u32(nokdf) + nkeys = 1 + main.put_u32(nkeys) + main.put_str(spub.tobytes()) + main.put_str(spriv.tobytes()) + return main.topem() diff --git a/src/ecdsa/test_curves.py b/src/ecdsa/test_curves.py new file mode 100644 index 00000000..93b6c9bd --- /dev/null +++ b/src/ecdsa/test_curves.py @@ -0,0 +1,361 @@ +try: + import unittest2 as unittest +except ImportError: + import unittest + +import base64 +import pytest +from .curves import ( + Curve, + NIST256p, + curves, + UnknownCurveError, + PRIME_FIELD_OID, + curve_by_name, +) +from .ellipticcurve import CurveFp, PointJacobi, CurveEdTw +from . 
import der +from .util import number_to_string + + +class TestParameterEncoding(unittest.TestCase): + @classmethod + def setUpClass(cls): + # minimal, but with cofactor (excludes seed when compared to + # OpenSSL output) + cls.base64_params = ( + "MIHgAgEBMCwGByqGSM49AQECIQD/////AAAAAQAAAAAAAAAAAAAAAP/////////" + "//////zBEBCD/////AAAAAQAAAAAAAAAAAAAAAP///////////////AQgWsY12K" + "o6k+ez671VdpiGvGUdBrDMU7D2O848PifSYEsEQQRrF9Hy4SxCR/i85uVjpEDyd" + "wN9gS3rM6D0oTlF2JjClk/jQuL+Gn+bjufrSnwPnhYrzjNXazFezsu2QGg3v1H1" + "AiEA/////wAAAAD//////////7zm+q2nF56E87nKwvxjJVECAQE=" + ) + + def test_from_pem(self): + pem_params = ( + "-----BEGIN EC PARAMETERS-----\n" + "MIHgAgEBMCwGByqGSM49AQECIQD/////AAAAAQAAAAAAAAAAAAAAAP/////////\n" + "//////zBEBCD/////AAAAAQAAAAAAAAAAAAAAAP///////////////AQgWsY12K\n" + "o6k+ez671VdpiGvGUdBrDMU7D2O848PifSYEsEQQRrF9Hy4SxCR/i85uVjpEDyd\n" + "wN9gS3rM6D0oTlF2JjClk/jQuL+Gn+bjufrSnwPnhYrzjNXazFezsu2QGg3v1H1\n" + "AiEA/////wAAAAD//////////7zm+q2nF56E87nKwvxjJVECAQE=\n" + "-----END EC PARAMETERS-----\n" + ) + curve = Curve.from_pem(pem_params) + + self.assertIs(curve, NIST256p) + + def test_from_pem_with_explicit_when_explicit_disabled(self): + pem_params = ( + "-----BEGIN EC PARAMETERS-----\n" + "MIHgAgEBMCwGByqGSM49AQECIQD/////AAAAAQAAAAAAAAAAAAAAAP/////////\n" + "//////zBEBCD/////AAAAAQAAAAAAAAAAAAAAAP///////////////AQgWsY12K\n" + "o6k+ez671VdpiGvGUdBrDMU7D2O848PifSYEsEQQRrF9Hy4SxCR/i85uVjpEDyd\n" + "wN9gS3rM6D0oTlF2JjClk/jQuL+Gn+bjufrSnwPnhYrzjNXazFezsu2QGg3v1H1\n" + "AiEA/////wAAAAD//////////7zm+q2nF56E87nKwvxjJVECAQE=\n" + "-----END EC PARAMETERS-----\n" + ) + with self.assertRaises(der.UnexpectedDER) as e: + Curve.from_pem(pem_params, ["named_curve"]) + + self.assertIn("explicit curve parameters not", str(e.exception)) + + def test_from_pem_with_named_curve_with_named_curve_disabled(self): + pem_params = ( + "-----BEGIN EC PARAMETERS-----\n" + "BggqhkjOPQMBBw==\n" + "-----END EC PARAMETERS-----\n" + ) + with self.assertRaises(der.UnexpectedDER) as e: + Curve.from_pem(pem_params, ["explicit"]) + + self.assertIn("named_curve curve parameters not", str(e.exception)) + + def test_from_pem_with_wrong_header(self): + pem_params = ( + "-----BEGIN PARAMETERS-----\n" + "MIHgAgEBMCwGByqGSM49AQECIQD/////AAAAAQAAAAAAAAAAAAAAAP/////////\n" + "//////zBEBCD/////AAAAAQAAAAAAAAAAAAAAAP///////////////AQgWsY12K\n" + "o6k+ez671VdpiGvGUdBrDMU7D2O848PifSYEsEQQRrF9Hy4SxCR/i85uVjpEDyd\n" + "wN9gS3rM6D0oTlF2JjClk/jQuL+Gn+bjufrSnwPnhYrzjNXazFezsu2QGg3v1H1\n" + "AiEA/////wAAAAD//////////7zm+q2nF56E87nKwvxjJVECAQE=\n" + "-----END PARAMETERS-----\n" + ) + with self.assertRaises(der.UnexpectedDER) as e: + Curve.from_pem(pem_params) + + self.assertIn("PARAMETERS PEM header", str(e.exception)) + + def test_to_pem(self): + pem_params = ( + b"-----BEGIN EC PARAMETERS-----\n" + b"BggqhkjOPQMBBw==\n" + b"-----END EC PARAMETERS-----\n" + ) + encoding = NIST256p.to_pem() + + self.assertEqual(pem_params, encoding) + + def test_compare_with_different_object(self): + self.assertNotEqual(NIST256p, 256) + + def test_named_curve_params_der(self): + encoded = NIST256p.to_der() + + # just the encoding of the NIST256p OID (prime256v1) + self.assertEqual(b"\x06\x08\x2a\x86\x48\xce\x3d\x03\x01\x07", encoded) + + def test_verify_that_default_is_named_curve_der(self): + encoded_default = NIST256p.to_der() + encoded_named = NIST256p.to_der("named_curve") + + self.assertEqual(encoded_default, encoded_named) + + def test_encoding_to_explicit_params(self): + encoded = NIST256p.to_der("explicit") + + 
self.assertEqual(encoded, bytes(base64.b64decode(self.base64_params))) + + def test_encoding_to_unsupported_type(self): + with self.assertRaises(ValueError) as e: + NIST256p.to_der("unsupported") + + self.assertIn("Only 'named_curve'", str(e.exception)) + + def test_encoding_to_explicit_compressed_params(self): + encoded = NIST256p.to_der("explicit", "compressed") + + compressed_base_point = ( + "MIHAAgEBMCwGByqGSM49AQECIQD/////AAAAAQAAAAAAAAAAAAAAAP//////////" + "/////zBEBCD/////AAAAAQAAAAAAAAAAAAAAAP///////////////AQgWsY12Ko6" + "k+ez671VdpiGvGUdBrDMU7D2O848PifSYEsEIQNrF9Hy4SxCR/i85uVjpEDydwN9" + "gS3rM6D0oTlF2JjClgIhAP////8AAAAA//////////+85vqtpxeehPO5ysL8YyVR" + "AgEB" + ) + + self.assertEqual( + encoded, bytes(base64.b64decode(compressed_base_point)) + ) + + def test_decoding_explicit_from_openssl(self): + # generated with openssl 1.1.1k using + # openssl ecparam -name P-256 -param_enc explicit -out /tmp/file.pem + p256_explicit = ( + "MIH3AgEBMCwGByqGSM49AQECIQD/////AAAAAQAAAAAAAAAAAAAAAP//////////" + "/////zBbBCD/////AAAAAQAAAAAAAAAAAAAAAP///////////////AQgWsY12Ko6" + "k+ez671VdpiGvGUdBrDMU7D2O848PifSYEsDFQDEnTYIhucEk2pmeOETnSa3gZ9+" + "kARBBGsX0fLhLEJH+Lzm5WOkQPJ3A32BLeszoPShOUXYmMKWT+NC4v4af5uO5+tK" + "fA+eFivOM1drMV7Oy7ZAaDe/UfUCIQD/////AAAAAP//////////vOb6racXnoTz" + "ucrC/GMlUQIBAQ==" + ) + + decoded = Curve.from_der(bytes(base64.b64decode(p256_explicit))) + + self.assertEqual(NIST256p, decoded) + + def test_decoding_well_known_from_explicit_params(self): + curve = Curve.from_der(bytes(base64.b64decode(self.base64_params))) + + self.assertIs(curve, NIST256p) + + def test_decoding_with_incorrect_valid_encodings(self): + with self.assertRaises(ValueError) as e: + Curve.from_der(b"", ["explicitCA"]) + + self.assertIn("Only named_curve", str(e.exception)) + + def test_compare_curves_with_different_generators(self): + curve_fp = CurveFp(23, 1, 7) + base_a = PointJacobi(curve_fp, 13, 3, 1, 9, generator=True) + base_b = PointJacobi(curve_fp, 1, 20, 1, 9, generator=True) + + curve_a = Curve("unknown", curve_fp, base_a, None) + curve_b = Curve("unknown", curve_fp, base_b, None) + + self.assertNotEqual(curve_a, curve_b) + + def test_default_encode_for_custom_curve(self): + curve_fp = CurveFp(23, 1, 7) + base_point = PointJacobi(curve_fp, 13, 3, 1, 9, generator=True) + + curve = Curve("unknown", curve_fp, base_point, None) + + encoded = curve.to_der() + + decoded = Curve.from_der(encoded) + + self.assertEqual(curve, decoded) + + expected = "MCECAQEwDAYHKoZIzj0BAQIBFzAGBAEBBAEHBAMEDQMCAQk=" + + self.assertEqual(encoded, bytes(base64.b64decode(expected))) + + def test_named_curve_encode_for_custom_curve(self): + curve_fp = CurveFp(23, 1, 7) + base_point = PointJacobi(curve_fp, 13, 3, 1, 9, generator=True) + + curve = Curve("unknown", curve_fp, base_point, None) + + with self.assertRaises(UnknownCurveError) as e: + curve.to_der("named_curve") + + self.assertIn("Can't encode curve", str(e.exception)) + + def test_try_decoding_binary_explicit(self): + sect113r1_explicit = ( + "MIGRAgEBMBwGByqGSM49AQIwEQIBcQYJKoZIzj0BAgMCAgEJMDkEDwAwiCUMpufH" + "/mSc6Fgg9wQPAOi+5NPiJgdEGIvg6ccjAxUAEOcjqxTWluZ2h1YVF1b+v4/LSakE" + "HwQAnXNhbzX0qxQH1zViwQ8ApSgwJ3lY7oTRMV7TGIYCDwEAAAAAAAAA2czsijnl" + "bwIBAg==" + ) + + with self.assertRaises(UnknownCurveError) as e: + Curve.from_der(base64.b64decode(sect113r1_explicit)) + + self.assertIn("Characteristic 2 curves unsupported", str(e.exception)) + + def test_decode_malformed_named_curve(self): + bad_der = der.encode_oid(*NIST256p.oid) + der.encode_integer(1) + + with 
self.assertRaises(der.UnexpectedDER) as e: + Curve.from_der(bad_der) + + self.assertIn("Unexpected data after OID", str(e.exception)) + + def test_decode_malformed_explicit_garbage_after_ECParam(self): + bad_der = bytes( + base64.b64decode(self.base64_params) + ) + der.encode_integer(1) + + with self.assertRaises(der.UnexpectedDER) as e: + Curve.from_der(bad_der) + + self.assertIn("Unexpected data after ECParameters", str(e.exception)) + + def test_decode_malformed_unknown_version_number(self): + bad_der = der.encode_sequence(der.encode_integer(2)) + + with self.assertRaises(der.UnexpectedDER) as e: + Curve.from_der(bad_der) + + self.assertIn("Unknown parameter encoding format", str(e.exception)) + + def test_decode_malformed_unknown_field_type(self): + curve_p = NIST256p.curve.p() + bad_der = der.encode_sequence( + der.encode_integer(1), + der.encode_sequence( + der.encode_oid(1, 2, 3), der.encode_integer(curve_p) + ), + der.encode_sequence( + der.encode_octet_string( + number_to_string(NIST256p.curve.a() % curve_p, curve_p) + ), + der.encode_octet_string( + number_to_string(NIST256p.curve.b(), curve_p) + ), + ), + der.encode_octet_string( + NIST256p.generator.to_bytes("uncompressed") + ), + der.encode_integer(NIST256p.generator.order()), + ) + + with self.assertRaises(UnknownCurveError) as e: + Curve.from_der(bad_der) + + self.assertIn("Unknown field type: (1, 2, 3)", str(e.exception)) + + def test_decode_malformed_garbage_after_prime(self): + curve_p = NIST256p.curve.p() + bad_der = der.encode_sequence( + der.encode_integer(1), + der.encode_sequence( + der.encode_oid(*PRIME_FIELD_OID), + der.encode_integer(curve_p), + der.encode_integer(1), + ), + der.encode_sequence( + der.encode_octet_string( + number_to_string(NIST256p.curve.a() % curve_p, curve_p) + ), + der.encode_octet_string( + number_to_string(NIST256p.curve.b(), curve_p) + ), + ), + der.encode_octet_string( + NIST256p.generator.to_bytes("uncompressed") + ), + der.encode_integer(NIST256p.generator.order()), + ) + + with self.assertRaises(der.UnexpectedDER) as e: + Curve.from_der(bad_der) + + self.assertIn("Prime-p element", str(e.exception)) + + +class TestCurveSearching(unittest.TestCase): + def test_correct_name(self): + c = curve_by_name("NIST256p") + self.assertIs(c, NIST256p) + + def test_openssl_name(self): + c = curve_by_name("prime256v1") + self.assertIs(c, NIST256p) + + def test_unknown_curve(self): + with self.assertRaises(UnknownCurveError) as e: + curve_by_name("foo bar") + + self.assertIn( + "name 'foo bar' unknown, only curves supported: " + "['NIST192p', 'NIST224p'", + str(e.exception), + ) + + def test_with_None_as_parameter(self): + with self.assertRaises(UnknownCurveError) as e: + curve_by_name(None) + + self.assertIn( + "name None unknown, only curves supported: " + "['NIST192p', 'NIST224p'", + str(e.exception), + ) + + +@pytest.mark.parametrize("curve", curves, ids=[i.name for i in curves]) +def test_curve_params_encode_decode_named(curve): + ret = Curve.from_der(curve.to_der("named_curve")) + + assert curve == ret + + +@pytest.mark.parametrize("curve", curves, ids=[i.name for i in curves]) +def test_curve_params_encode_decode_explicit(curve): + if isinstance(curve.curve, CurveEdTw): + with pytest.raises(UnknownCurveError): + curve.to_der("explicit") + else: + ret = Curve.from_der(curve.to_der("explicit")) + + assert curve == ret + + +@pytest.mark.parametrize("curve", curves, ids=[i.name for i in curves]) +def test_curve_params_encode_decode_default(curve): + ret = Curve.from_der(curve.to_der()) + + assert 
curve == ret + + +@pytest.mark.parametrize("curve", curves, ids=[i.name for i in curves]) +def test_curve_params_encode_decode_explicit_compressed(curve): + if isinstance(curve.curve, CurveEdTw): + with pytest.raises(UnknownCurveError): + curve.to_der("explicit", "compressed") + else: + ret = Curve.from_der(curve.to_der("explicit", "compressed")) + + assert curve == ret diff --git a/src/ecdsa/test_der.py b/src/ecdsa/test_der.py index 746d9277..b0955431 100644 --- a/src/ecdsa/test_der.py +++ b/src/ecdsa/test_der.py @@ -7,9 +7,9 @@ import unittest2 as unittest except ImportError: import unittest -from six import b +import sys import hypothesis.strategies as st -from hypothesis import given, example +from hypothesis import given, settings import pytest from ._compat import str_idx_as_int from .curves import NIST256p, NIST224p @@ -21,6 +21,11 @@ remove_bitstring, remove_object, encode_oid, + remove_constructed, + remove_implicit, + remove_octet_string, + remove_sequence, + encode_implicit, ) @@ -29,47 +34,59 @@ class TestRemoveInteger(unittest.TestCase): # interpreted as negative, check if those errors are detected def test_non_minimal_encoding(self): with self.assertRaises(UnexpectedDER): - remove_integer(b("\x02\x02\x00\x01")) + remove_integer(b"\x02\x02\x00\x01") def test_negative_with_high_bit_set(self): with self.assertRaises(UnexpectedDER): - remove_integer(b("\x02\x01\x80")) + remove_integer(b"\x02\x01\x80") def test_minimal_with_high_bit_set(self): - val, rem = remove_integer(b("\x02\x02\x00\x80")) + val, rem = remove_integer(b"\x02\x02\x00\x80") self.assertEqual(val, 0x80) - self.assertFalse(rem) + self.assertEqual(rem, b"") def test_two_zero_bytes_with_high_bit_set(self): with self.assertRaises(UnexpectedDER): - remove_integer(b("\x02\x03\x00\x00\xff")) + remove_integer(b"\x02\x03\x00\x00\xff") def test_zero_length_integer(self): with self.assertRaises(UnexpectedDER): - remove_integer(b("\x02\x00")) + remove_integer(b"\x02\x00") def test_empty_string(self): with self.assertRaises(UnexpectedDER): - remove_integer(b("")) + remove_integer(b"") def test_encoding_of_zero(self): - val, rem = remove_integer(b("\x02\x01\x00")) + val, rem = remove_integer(b"\x02\x01\x00") self.assertEqual(val, 0) - self.assertFalse(rem) + self.assertEqual(rem, b"") def test_encoding_of_127(self): - val, rem = remove_integer(b("\x02\x01\x7f")) + val, rem = remove_integer(b"\x02\x01\x7f") self.assertEqual(val, 127) - self.assertFalse(rem) + self.assertEqual(rem, b"") def test_encoding_of_128(self): - val, rem = remove_integer(b("\x02\x02\x00\x80")) + val, rem = remove_integer(b"\x02\x02\x00\x80") self.assertEqual(val, 128) - self.assertFalse(rem) + self.assertEqual(rem, b"") + + def test_wrong_tag(self): + with self.assertRaises(UnexpectedDER) as e: + remove_integer(b"\x01\x02\x00\x80") + + self.assertIn("wanted type 'integer'", str(e.exception)) + + def test_wrong_length(self): + with self.assertRaises(UnexpectedDER) as e: + remove_integer(b"\x02\x03\x00\x80") + + self.assertIn("Length longer", str(e.exception)) class TestReadLength(unittest.TestCase): @@ -77,37 +94,37 @@ class TestReadLength(unittest.TestCase): # form and lengths above that encoded with minimal number of bytes # necessary def test_zero_length(self): - self.assertEqual((0, 1), read_length(b("\x00"))) + self.assertEqual((0, 1), read_length(b"\x00")) def test_two_byte_zero_length(self): with self.assertRaises(UnexpectedDER): - read_length(b("\x81\x00")) + read_length(b"\x81\x00") def test_two_byte_small_length(self): with 
self.assertRaises(UnexpectedDER): - read_length(b("\x81\x7f")) + read_length(b"\x81\x7f") def test_long_form_with_zero_length(self): with self.assertRaises(UnexpectedDER): - read_length(b("\x80")) + read_length(b"\x80") def test_smallest_two_byte_length(self): - self.assertEqual((128, 2), read_length(b("\x81\x80"))) + self.assertEqual((128, 2), read_length(b"\x81\x80")) def test_zero_padded_length(self): with self.assertRaises(UnexpectedDER): - read_length(b("\x82\x00\x80")) + read_length(b"\x82\x00\x80") def test_two_three_byte_length(self): self.assertEqual((256, 3), read_length(b"\x82\x01\x00")) def test_empty_string(self): with self.assertRaises(UnexpectedDER): - read_length(b("")) + read_length(b"") def test_length_overflow(self): with self.assertRaises(UnexpectedDER): - read_length(b("\x83\x01\x00")) + read_length(b"\x83\x01\x00") class TestEncodeBitstring(unittest.TestCase): @@ -129,26 +146,22 @@ def test_old_call_convention(self): def test_new_call_convention(self): """This is how it should be called now.""" - warnings.simplefilter("always") - with pytest.warns(None) as warns: + # make sure no warnings are raised + with warnings.catch_warnings(): + warnings.simplefilter("error") der = encode_bitstring(b"\xff", 0) - # verify that new call convention doesn't raise Warnings - self.assertEqual(len(warns), 0) - self.assertEqual(der, b"\x03\x02\x00\xff") def test_implicit_unused_bits(self): """ Writing bit string with already included the number of unused bits. """ - warnings.simplefilter("always") - with pytest.warns(None) as warns: + # make sure no warnings are raised + with warnings.catch_warnings(): + warnings.simplefilter("error") der = encode_bitstring(b"\x00\xff", None) - # verify that new call convention doesn't raise Warnings - self.assertEqual(len(warns), 0) - self.assertEqual(der, b"\x03\x02\x00\xff") def test_explicit_unused_bits(self): @@ -188,22 +201,20 @@ def test_old_call_convention(self): self.assertEqual(rest, b"") def test_new_call_convention(self): - warnings.simplefilter("always") - with pytest.warns(None) as warns: + # make sure no warnings are raised + with warnings.catch_warnings(): + warnings.simplefilter("error") bits, rest = remove_bitstring(b"\x03\x02\x00\xff", 0) - self.assertEqual(len(warns), 0) - self.assertEqual(bits, b"\xff") self.assertEqual(rest, b"") def test_implicit_unexpected_unused(self): - warnings.simplefilter("always") - with pytest.warns(None) as warns: + # make sure no warnings are raised + with warnings.catch_warnings(): + warnings.simplefilter("error") bits, rest = remove_bitstring(b"\x03\x02\x00\xff", None) - self.assertEqual(len(warns), 0) - self.assertEqual(bits, (b"\xff", 0)) self.assertEqual(rest, b"") @@ -260,10 +271,10 @@ def test_bytearray(self): class TestEncodeOid(unittest.TestCase): def test_pub_key_oid(self): oid_ecPublicKey = encode_oid(1, 2, 840, 10045, 2, 1) - self.assertEqual(hexlify(oid_ecPublicKey), b("06072a8648ce3d0201")) + self.assertEqual(hexlify(oid_ecPublicKey), b"06072a8648ce3d0201") def test_nist224p_oid(self): - self.assertEqual(hexlify(NIST224p.encoded_oid), b("06052b81040021")) + self.assertEqual(hexlify(NIST224p.encoded_oid), b"06052b81040021") def test_nist256p_oid(self): self.assertEqual( @@ -368,8 +379,194 @@ def test_with_too_long_length(self): remove_object(b"\x06\x03\x88\x37") +class TestRemoveConstructed(unittest.TestCase): + def test_simple(self): + data = b"\xa1\x02\xff\xaa" + + tag, body, rest = remove_constructed(data) + + self.assertEqual(tag, 0x01) + self.assertEqual(body, b"\xff\xaa") + 
self.assertEqual(rest, b"") + + def test_with_malformed_tag(self): + data = b"\x01\x02\xff\xaa" + + with self.assertRaises(UnexpectedDER) as e: + remove_constructed(data) + + self.assertIn("constructed tag", str(e.exception)) + + +class TestRemoveImplicit(unittest.TestCase): + @classmethod + def setUpClass(cls): + cls.exp_tag = 6 + cls.exp_data = b"\x0a\x0b" + # data with application tag class + cls.data_application = b"\x46\x02\x0a\x0b" + # data with context-specific tag class + cls.data_context_specific = b"\x86\x02\x0a\x0b" + # data with private tag class + cls.data_private = b"\xc6\x02\x0a\x0b" + + def test_simple(self): + tag, body, rest = remove_implicit(self.data_context_specific) + + self.assertEqual(tag, self.exp_tag) + self.assertEqual(body, self.exp_data) + self.assertEqual(rest, b"") + + def test_wrong_expected_class(self): + with self.assertRaises(ValueError) as e: + remove_implicit(self.data_context_specific, "foobar") + + self.assertIn("invalid `exp_class` value", str(e.exception)) + + def test_with_wrong_class(self): + with self.assertRaises(UnexpectedDER) as e: + remove_implicit(self.data_application) + + self.assertIn( + "wanted class context-specific, got 0x46 tag", str(e.exception) + ) + + def test_with_application_class(self): + tag, body, rest = remove_implicit(self.data_application, "application") + + self.assertEqual(tag, self.exp_tag) + self.assertEqual(body, self.exp_data) + self.assertEqual(rest, b"") + + def test_with_private_class(self): + tag, body, rest = remove_implicit(self.data_private, "private") + + self.assertEqual(tag, self.exp_tag) + self.assertEqual(body, self.exp_data) + self.assertEqual(rest, b"") + + def test_with_data_following(self): + extra_data = b"\x00\x01" + + tag, body, rest = remove_implicit( + self.data_context_specific + extra_data + ) + + self.assertEqual(tag, self.exp_tag) + self.assertEqual(body, self.exp_data) + self.assertEqual(rest, extra_data) + + def test_with_constructed(self): + data = b"\xa6\x02\x0a\x0b" + + with self.assertRaises(UnexpectedDER) as e: + remove_implicit(data) + + self.assertIn("wanted type primitive, got 0xa6 tag", str(e.exception)) + + def test_encode_decode(self): + data = b"some longish string" + + tag, body, rest = remove_implicit( + encode_implicit(6, data, "application"), "application" + ) + + self.assertEqual(tag, 6) + self.assertEqual(body, data) + self.assertEqual(rest, b"") + + +class TestEncodeImplicit(unittest.TestCase): + @classmethod + def setUpClass(cls): + cls.data = b"\x0a\x0b" + # data with application tag class + cls.data_application = b"\x46\x02\x0a\x0b" + # data with context-specific tag class + cls.data_context_specific = b"\x86\x02\x0a\x0b" + # data with private tag class + cls.data_private = b"\xc6\x02\x0a\x0b" + + def test_encode_with_default_class(self): + ret = encode_implicit(6, self.data) + + self.assertEqual(ret, self.data_context_specific) + + def test_encode_with_application_class(self): + ret = encode_implicit(6, self.data, "application") + + self.assertEqual(ret, self.data_application) + + def test_encode_with_context_specific_class(self): + ret = encode_implicit(6, self.data, "context-specific") + + self.assertEqual(ret, self.data_context_specific) + + def test_encode_with_private_class(self): + ret = encode_implicit(6, self.data, "private") + + self.assertEqual(ret, self.data_private) + + def test_encode_with_invalid_class(self): + with self.assertRaises(ValueError) as e: + encode_implicit(6, self.data, "foobar") + + self.assertIn("invalid tag class", str(e.exception)) + + 
def test_encode_with_too_large_tag(self): + with self.assertRaises(ValueError) as e: + encode_implicit(32, self.data) + + self.assertIn("Long tags not supported", str(e.exception)) + + +class TestRemoveOctetString(unittest.TestCase): + def test_simple(self): + data = b"\x04\x03\xaa\xbb\xcc" + body, rest = remove_octet_string(data) + self.assertEqual(body, b"\xaa\xbb\xcc") + self.assertEqual(rest, b"") + + def test_with_malformed_tag(self): + data = b"\x03\x03\xaa\xbb\xcc" + with self.assertRaises(UnexpectedDER) as e: + remove_octet_string(data) + + self.assertIn("octetstring", str(e.exception)) + + +class TestRemoveSequence(unittest.TestCase): + def test_simple(self): + data = b"\x30\x02\xff\xaa" + body, rest = remove_sequence(data) + self.assertEqual(body, b"\xff\xaa") + self.assertEqual(rest, b"") + + def test_with_empty_string(self): + with self.assertRaises(UnexpectedDER) as e: + remove_sequence(b"") + + self.assertIn("Empty string", str(e.exception)) + + def test_with_wrong_tag(self): + data = b"\x20\x02\xff\xaa" + + with self.assertRaises(UnexpectedDER) as e: + remove_sequence(data) + + self.assertIn("wanted type 'sequence'", str(e.exception)) + + def test_with_wrong_length(self): + data = b"\x30\x03\xff\xaa" + + with self.assertRaises(UnexpectedDER) as e: + remove_sequence(data) + + self.assertIn("Length longer", str(e.exception)) + + @st.composite -def st_oid(draw, max_value=2 ** 512, max_size=50): +def st_oid(draw, max_value=2**512, max_size=50): """ Hypothesis strategy that returns valid OBJECT IDENTIFIERs as tuples @@ -389,6 +586,14 @@ def st_oid(draw, max_value=2 ** 512, max_size=50): return (first, second) + tuple(rest) +HYP_SETTINGS = {} + + +if "--fast" in sys.argv: # pragma: no cover + HYP_SETTINGS["max_examples"] = 2 + + +@settings(**HYP_SETTINGS) @given(st_oid()) def test_oids(ids): encoded_oid = encode_oid(*ids) diff --git a/src/ecdsa/test_ecdh.py b/src/ecdsa/test_ecdh.py index caf68352..cb225803 100644 --- a/src/ecdsa/test_ecdh.py +++ b/src/ecdsa/test_ecdh.py @@ -1,19 +1,49 @@ import os +import sys import shutil import subprocess import pytest -from binascii import hexlify, unhexlify - -from .curves import NIST192p, NIST224p, NIST256p, NIST384p, NIST521p +from binascii import unhexlify + +try: + import unittest2 as unittest +except ImportError: + import unittest + +from .curves import ( + NIST192p, + NIST224p, + NIST256p, + NIST384p, + NIST521p, + BRAINPOOLP160r1, + SECP112r2, + SECP128r1, +) from .curves import curves -from .ecdh import ECDH, InvalidCurveError, InvalidSharedSecretError, NoKeyError +from .ecdh import ( + ECDH, + InvalidCurveError, + InvalidSharedSecretError, + NoKeyError, + NoCurveError, +) from .keys import SigningKey, VerifyingKey +from .ellipticcurve import CurveEdTw + + +if "--fast" in sys.argv: # pragma: no cover + curves = [SECP112r2, SECP128r1] @pytest.mark.parametrize( - "vcurve", curves, ids=[curve.name for curve in curves] + "vcurve", + curves, + ids=[curve.name for curve in curves], ) def test_ecdh_each(vcurve): + if isinstance(vcurve.curve, CurveEdTw): + pytest.skip("ECDH is not supported for Edwards curves") ecdh1 = ECDH(curve=vcurve) ecdh2 = ECDH(curve=vcurve) @@ -26,6 +56,19 @@ def test_ecdh_each(vcurve): assert secret1 == secret2 +def test_ecdh_both_keys_present(): + key1 = SigningKey.generate(BRAINPOOLP160r1) + key2 = SigningKey.generate(BRAINPOOLP160r1) + + ecdh1 = ECDH(BRAINPOOLP160r1, key1, key2.verifying_key) + ecdh2 = ECDH(private_key=key2, public_key=key1.verifying_key) + + secret1 = ecdh1.generate_sharedsecret_bytes() + secret2 = 
ecdh2.generate_sharedsecret_bytes() + + assert secret1 == secret2 + + def test_ecdh_no_public_key(): ecdh1 = ECDH(curve=NIST192p) @@ -38,6 +81,44 @@ def test_ecdh_no_public_key(): ecdh1.generate_sharedsecret_bytes() +class TestECDH(unittest.TestCase): + def test_load_key_from_wrong_curve(self): + ecdh1 = ECDH() + ecdh1.set_curve(NIST192p) + + key1 = SigningKey.generate(BRAINPOOLP160r1) + + with self.assertRaises(InvalidCurveError) as e: + ecdh1.load_private_key(key1) + + self.assertIn("Curve mismatch", str(e.exception)) + + def test_generate_without_curve(self): + ecdh1 = ECDH() + + with self.assertRaises(NoCurveError) as e: + ecdh1.generate_private_key() + + self.assertIn("Curve must be set", str(e.exception)) + + def test_load_bytes_without_curve_set(self): + ecdh1 = ECDH() + + with self.assertRaises(NoCurveError) as e: + ecdh1.load_private_key_bytes(b"\x01" * 32) + + self.assertIn("Curve must be set", str(e.exception)) + + def test_set_curve_from_received_public_key(self): + ecdh1 = ECDH() + + key1 = SigningKey.generate(BRAINPOOLP160r1) + + ecdh1.load_received_public_key(key1.verifying_key) + + self.assertEqual(ecdh1.curve, BRAINPOOLP160r1) + + def test_ecdh_wrong_public_key_curve(): ecdh1 = ECDH(curve=NIST192p) ecdh1.generate_private_key() @@ -292,26 +373,29 @@ def run_openssl(cmd): ) +@pytest.mark.slow @pytest.mark.parametrize( - "vcurve", curves, ids=[curve.name for curve in curves] + "vcurve", + curves, + ids=[curve.name for curve in curves], ) def test_ecdh_with_openssl(vcurve): + if isinstance(vcurve.curve, CurveEdTw): + pytest.skip("Edwards curves are not supported for ECDH") + assert vcurve.openssl_name if vcurve.openssl_name not in OPENSSL_SUPPORTED_CURVES: pytest.skip("system openssl does not support " + vcurve.openssl_name) - return try: hlp = run_openssl("pkeyutl -help") - if hlp.find("-derive") == 0: + if hlp.find("-derive") == 0: # pragma: no cover pytest.skip("system openssl does not support `pkeyutl -derive`") - return - except RunOpenSslError: - pytest.skip("system openssl does not support `pkeyutl -derive`") - return + except RunOpenSslError: # pragma: no cover + pytest.skip("system openssl could not be executed") - if os.path.isdir("t"): + if os.path.isdir("t"): # pragma: no branch shutil.rmtree("t") os.mkdir("t") run_openssl( @@ -346,25 +430,20 @@ def test_ecdh_with_openssl(vcurve): assert secret1 == secret2 - try: - run_openssl( - "pkeyutl -derive -inkey t/privkey1.pem -peerkey t/pubkey2.pem -out t/secret1" - ) - run_openssl( - "pkeyutl -derive -inkey t/privkey2.pem -peerkey t/pubkey1.pem -out t/secret2" - ) - except RunOpenSslError: - pytest.skip("system openssl does not support `pkeyutl -derive`") - return + run_openssl( + "pkeyutl -derive -inkey t/privkey1.pem -peerkey t/pubkey2.pem -out t/secret1" + ) + run_openssl( + "pkeyutl -derive -inkey t/privkey2.pem -peerkey t/pubkey1.pem -out t/secret2" + ) with open("t/secret1", "rb") as e: ssl_secret1 = e.read() with open("t/secret1", "rb") as e: ssl_secret2 = e.read() - if len(ssl_secret1) != vk1.curve.baselen: - pytest.skip("system openssl does not support `pkeyutl -derive`") - return + assert len(ssl_secret1) == vk1.curve.verifying_key_length // 2 + assert len(secret1) == vk1.curve.verifying_key_length // 2 assert ssl_secret1 == ssl_secret2 assert secret1 == ssl_secret1 diff --git a/src/ecdsa/test_ecdsa.py b/src/ecdsa/test_ecdsa.py index e656b880..c1e25829 100644 --- a/src/ecdsa/test_ecdsa.py +++ b/src/ecdsa/test_ecdsa.py @@ -21,7 +21,13 @@ generator_384, generator_521, generator_secp256k1, + curve_192, + 
InvalidPointError, + curve_112r2, + generator_112r2, + int_to_string, ) +from .ellipticcurve import Point HYP_SETTINGS = {} @@ -71,6 +77,19 @@ def test_verification(self): def test_rejection(self): assert not self.pubk.verifies(self.msg - 1, self.sig) + def test_verification_with_regular_point(self): + pubk = Public_key( + Point( + generator_192.curve(), + generator_192.x(), + generator_192.y(), + generator_192.order(), + ), + self.pubk.point, + ) + + assert pubk.verifies(self.msg, self.sig) + class TestPublicKey(unittest.TestCase): def test_equality_public_keys(self): @@ -96,6 +115,20 @@ def test_inequality_public_key(self): pub_key2 = Public_key(gen, point2) self.assertNotEqual(pub_key1, pub_key2) + def test_inequality_different_curves(self): + gen = generator_192 + x1 = 0xC58D61F88D905293BCD4CD0080BCB1B7F811F2FFA41979F6 + y1 = 0x8804DC7A7C4C7F8B5D437F5156F3312CA7D6DE8A0E11867F + point1 = ellipticcurve.Point(gen.curve(), x1, y1) + + x2 = 0x722BA0FB6B8FC8898A4C6AB49E66 + y2 = 0x2B7344BB57A7ABC8CA0F1A398C7D + point2 = ellipticcurve.Point(generator_112r2.curve(), x2, y2) + + pub_key1 = Public_key(gen, point1) + pub_key2 = Public_key(generator_112r2, point2) + self.assertNotEqual(pub_key1, pub_key2) + def test_inequality_public_key_not_implemented(self): gen = generator_192 x = 0xC58D61F88D905293BCD4CD0080BCB1B7F811F2FFA41979F6 @@ -104,6 +137,106 @@ def test_inequality_public_key_not_implemented(self): pub_key = Public_key(gen, point) self.assertNotEqual(pub_key, None) + def test_public_key_with_generator_without_order(self): + gen = ellipticcurve.PointJacobi( + generator_192.curve(), generator_192.x(), generator_192.y(), 1 + ) + + x = 0xC58D61F88D905293BCD4CD0080BCB1B7F811F2FFA41979F6 + y = 0x8804DC7A7C4C7F8B5D437F5156F3312CA7D6DE8A0E11867F + point = ellipticcurve.Point(gen.curve(), x, y) + + with self.assertRaises(InvalidPointError) as e: + Public_key(gen, point) + + self.assertIn("Generator point must have order", str(e.exception)) + + def test_public_point_on_curve_not_scalar_multiple_of_base_point(self): + x = 2 + y = 0xBE6AA4938EF7CFE6FE29595B6B00 + # we need a curve with cofactor != 1 + point = ellipticcurve.PointJacobi(curve_112r2, x, y, 1) + + self.assertTrue(curve_112r2.contains_point(x, y)) + + with self.assertRaises(InvalidPointError) as e: + Public_key(generator_112r2, point) + + self.assertIn("Generator point order", str(e.exception)) + + def test_point_is_valid_with_not_scalar_multiple_of_base_point(self): + x = 2 + y = 0xBE6AA4938EF7CFE6FE29595B6B00 + + self.assertFalse(point_is_valid(generator_112r2, x, y)) + + # the tests to verify the extensiveness of tests in ecdsa.ecdsa + # if PointJacobi gets modified to calculate the x and y mod p the tests + # below will need to use a fake/mock object + def test_invalid_point_x_negative(self): + pt = ellipticcurve.PointJacobi(curve_192, -1, 0, 1) + + with self.assertRaises(InvalidPointError) as e: + Public_key(generator_192, pt) + + self.assertIn("The public point has x or y", str(e.exception)) + + def test_invalid_point_x_equal_p(self): + pt = ellipticcurve.PointJacobi(curve_192, curve_192.p(), 0, 1) + + with self.assertRaises(InvalidPointError) as e: + Public_key(generator_192, pt) + + self.assertIn("The public point has x or y", str(e.exception)) + + def test_invalid_point_y_negative(self): + pt = ellipticcurve.PointJacobi(curve_192, 0, -1, 1) + + with self.assertRaises(InvalidPointError) as e: + Public_key(generator_192, pt) + + self.assertIn("The public point has x or y", str(e.exception)) + + def 
test_invalid_point_y_equal_p(self): + pt = ellipticcurve.PointJacobi(curve_192, 0, curve_192.p(), 1) + + with self.assertRaises(InvalidPointError) as e: + Public_key(generator_192, pt) + + self.assertIn("The public point has x or y", str(e.exception)) + + +class TestPublicKeyVerifies(unittest.TestCase): + # test all the different ways that a signature can be publicly invalid + @classmethod + def setUpClass(cls): + gen = generator_192 + x = 0xC58D61F88D905293BCD4CD0080BCB1B7F811F2FFA41979F6 + y = 0x8804DC7A7C4C7F8B5D437F5156F3312CA7D6DE8A0E11867F + point = ellipticcurve.Point(gen.curve(), x, y) + + cls.pub_key = Public_key(gen, point) + + def test_sig_with_r_zero(self): + sig = Signature(0, 1) + + self.assertFalse(self.pub_key.verifies(1, sig)) + + def test_sig_with_r_order(self): + sig = Signature(generator_192.order(), 1) + + self.assertFalse(self.pub_key.verifies(1, sig)) + + def test_sig_with_s_zero(self): + sig = Signature(1, 0) + + self.assertFalse(self.pub_key.verifies(1, sig)) + + def test_sig_with_s_order(self): + sig = Signature(1, generator_192.order()) + + self.assertFalse(self.pub_key.verifies(1, sig)) + class TestPrivateKey(unittest.TestCase): @classmethod @@ -465,7 +598,13 @@ def test_signature_validity(gen, msg, qx, qy, r, s, expected): elliptic curve of `gen`, `r` and `s` are the signature, and `expected` is True iff the signature is expected to be valid.""" pubk = Public_key(gen, ellipticcurve.Point(gen.curve(), qx, qy)) - assert expected == pubk.verifies(digest_integer(msg), Signature(r, s)) + with pytest.warns(DeprecationWarning) as warns: + msg_dgst = digest_integer(msg) + assert len(warns) == 3 + assert "unused" in warns[0].message.args[0] + assert "unused" in warns[1].message.args[0] + assert "unused" in warns[2].message.args[0] + assert expected == pubk.verifies(msg_dgst, Signature(r, s)) @pytest.mark.parametrize( @@ -474,7 +613,13 @@ def test_signature_validity(gen, msg, qx, qy, r, s, expected): def test_pk_recovery(gen, msg, r, s, qx, qy, expected): del expected sign = Signature(r, s) - pks = sign.recover_public_keys(digest_integer(msg), gen) + with pytest.warns(DeprecationWarning) as warns: + msg_dgst = digest_integer(msg) + assert len(warns) == 3 + assert "unused" in warns[0].message.args[0] + assert "unused" in warns[1].message.args[0] + assert "unused" in warns[2].message.args[0] + pks = sign.recover_public_keys(msg_dgst, gen) assert pks @@ -503,19 +648,22 @@ def st_random_gen_key_msg_nonce(draw): name = draw(st.sampled_from(sorted(name_gen.keys()))) note("Generator used: {0}".format(name)) generator = name_gen[name] - order = int(generator.order()) + order = int(generator.order()) - 1 key = draw(st.integers(min_value=1, max_value=order)) msg = draw(st.integers(min_value=1, max_value=order)) nonce = draw( - st.integers(min_value=1, max_value=order + 1) + st.integers(min_value=1, max_value=order) | st.integers(min_value=order >> 1, max_value=order) ) return generator, key, msg, nonce SIG_VER_SETTINGS = dict(HYP_SETTINGS) -SIG_VER_SETTINGS["max_examples"] = 10 +if "--fast" in sys.argv: # pragma: no cover + SIG_VER_SETTINGS["max_examples"] = 1 +else: + SIG_VER_SETTINGS["max_examples"] = 10 @settings(**SIG_VER_SETTINGS) @@ -536,3 +684,11 @@ def test_sig_verify(args): assert pubkey.verifies(msg, signature) assert not pubkey.verifies(msg - 1, signature) + + +def test_int_to_string_with_zero(): + with pytest.warns(DeprecationWarning) as warns: + assert int_to_string(0) == b"\x00" + + assert len(warns) == 1 + assert "unused" in warns[0].message.args[0] diff --git 
a/src/ecdsa/test_eddsa.py b/src/ecdsa/test_eddsa.py new file mode 100644 index 00000000..6821b3bc --- /dev/null +++ b/src/ecdsa/test_eddsa.py @@ -0,0 +1,1124 @@ +import sys +import pickle +import hashlib +import pytest + +try: + import unittest2 as unittest +except ImportError: + import unittest +from hypothesis import given, settings, example +import hypothesis.strategies as st +from .ellipticcurve import PointEdwards, INFINITY, CurveEdTw +from .eddsa import ( + generator_ed25519, + curve_ed25519, + generator_ed448, + curve_ed448, + PrivateKey, + PublicKey, +) +from .ecdsa import generator_256, curve_256 +from .errors import MalformedPointError +from ._compat import a2b_hex, compat26_str + + +class TestA2B_Hex(unittest.TestCase): + def test_invalid_input(self): + with self.assertRaises(ValueError): + a2b_hex("abcdefghi") + + +def test_ed25519_curve_compare(): + assert curve_ed25519 != curve_256 + + +def test_ed25519_and_ed448_compare(): + assert curve_ed448 != curve_ed25519 + + +def test_ed25519_and_custom_curve_compare(): + a = CurveEdTw(curve_ed25519.p(), -curve_ed25519.a(), 1) + + assert curve_ed25519 != a + + +def test_ed25519_and_almost_exact_curve_compare(): + a = CurveEdTw(curve_ed25519.p(), curve_ed25519.a(), 1) + + assert curve_ed25519 != a + + +def test_ed25519_and_same_curve_params(): + a = CurveEdTw(curve_ed25519.p(), curve_ed25519.a(), curve_ed25519.d()) + + assert curve_ed25519 == a + assert not (curve_ed25519 != a) + + +def test_ed25519_contains_point(): + g = generator_ed25519 + assert curve_ed25519.contains_point(g.x(), g.y()) + + +def test_ed25519_contains_point_bad(): + assert not curve_ed25519.contains_point(1, 1) + + +def test_ed25519_double(): + a = generator_ed25519 + + z = a.double() + + assert isinstance(z, PointEdwards) + + x2 = int( + "24727413235106541002554574571675588834622768167397638456726423" + "682521233608206" + ) + y2 = int( + "15549675580280190176352668710449542251549572066445060580507079" + "593062643049417" + ) + + b = PointEdwards(curve_ed25519, x2, y2, 1, x2 * y2) + + assert z == b + assert a != b + + +def test_ed25519_add_as_double(): + a = generator_ed25519 + + z = a + a + + assert isinstance(z, PointEdwards) + + b = generator_ed25519.double() + + assert z == b + + +def test_ed25519_double_infinity(): + a = PointEdwards(curve_ed25519, 0, 1, 1, 0) + + z = a.double() + + assert z is INFINITY + + +def test_ed25519_double_badly_encoded_infinity(): + # invalid point, mostly to make instrumental happy + a = PointEdwards(curve_ed25519, 1, 1, 1, 0) + + z = a.double() + + assert z is INFINITY + + +def test_ed25519_eq_with_different_z(): + x = generator_ed25519.x() + y = generator_ed25519.y() + p = curve_ed25519.p() + + a = PointEdwards(curve_ed25519, x * 2 % p, y * 2 % p, 2, x * y * 2 % p) + b = PointEdwards(curve_ed25519, x * 3 % p, y * 3 % p, 3, x * y * 3 % p) + + assert a == b + + assert not (a != b) + + +def test_ed25519_eq_against_infinity(): + assert generator_ed25519 != INFINITY + + +def test_ed25519_eq_encoded_infinity_against_infinity(): + a = PointEdwards(curve_ed25519, 0, 1, 1, 0) + assert a == INFINITY + + +def test_ed25519_eq_bad_encode_of_infinity_against_infinity(): + # technically incorrect encoding of the point at infinity, but we check + # both X and T, so verify that just T==0 works + a = PointEdwards(curve_ed25519, 1, 1, 1, 0) + assert a == INFINITY + + +def test_ed25519_eq_against_non_Edwards_point(): + assert generator_ed25519 != generator_256 + + +def test_ed25519_eq_against_negated_point(): + g = generator_ed25519 + neg = 
PointEdwards(curve_ed25519, -g.x(), g.y(), 1, -g.x() * g.y()) + assert g != neg + + +def test_ed25519_eq_x_different_y(): + # not points on the curve, but __eq__ doesn't care + a = PointEdwards(curve_ed25519, 1, 1, 1, 1) + b = PointEdwards(curve_ed25519, 1, 2, 1, 2) + + assert a != b + + +def test_ed25519_mul_by_order(): + g = PointEdwards( + curve_ed25519, + generator_ed25519.x(), + generator_ed25519.y(), + 1, + generator_ed25519.x() * generator_ed25519.y(), + ) + + assert g * generator_ed25519.order() == INFINITY + + +def test_radd(): + + a = PointEdwards(curve_ed25519, 1, 1, 1, 1) + + p = INFINITY + a + + assert p == a + + +def test_ed25519_test_normalisation_and_scaling(): + x = generator_ed25519.x() + y = generator_ed25519.y() + p = curve_ed25519.p() + + a = PointEdwards(curve_ed25519, x * 11 % p, y * 11 % p, 11, x * y * 11 % p) + + assert a.x() == x + assert a.y() == y + + a.scale() + + assert a.x() == x + assert a.y() == y + + a.scale() # second execution should be a noop + + assert a.x() == x + assert a.y() == y + + +def test_ed25519_add_three_times(): + a = generator_ed25519 + + z = a + a + a + + x3 = int( + "468967334644549386571235445953867877890461982801326656862413" + "21779790909858396" + ) + y3 = int( + "832484377853344397649037712036920113830141722629755531674120" + "2210403726505172" + ) + + b = PointEdwards(curve_ed25519, x3, y3, 1, x3 * y3) + + assert z == b + + +def test_ed25519_add_to_infinity(): + # generator * (order-1) + x1 = int( + "427838232691226969392843410947554224151809796397784248136826" + "78720006717057747" + ) + y1 = int( + "463168356949264781694283940034751631413079938662562256157830" + "33603165251855960" + ) + inf_m_1 = PointEdwards(curve_ed25519, x1, y1, 1, x1 * y1) + + inf = inf_m_1 + generator_ed25519 + + assert inf is INFINITY + + +def test_ed25519_add_and_mul_equivalence(): + g = generator_ed25519 + + assert g + g == g * 2 + assert g + g + g == g * 3 + + +def test_ed25519_add_literal_infinity(): + g = generator_ed25519 + z = g + INFINITY + + assert z == g + + +def test_ed25519_add_infinity(): + inf = PointEdwards(curve_ed25519, 0, 1, 1, 0) + g = generator_ed25519 + z = g + inf + + assert z == g + + z = inf + g + + assert z == g + + +class TestEd25519(unittest.TestCase): + def test_add_wrong_curves(self): + with self.assertRaises(ValueError) as e: + generator_ed25519 + generator_ed448 + + self.assertIn("different curve", str(e.exception)) + + def test_add_wrong_point_type(self): + with self.assertRaises(ValueError) as e: + generator_ed25519 + generator_256 + + self.assertIn("different curve", str(e.exception)) + + +def test_generate_with_point(): + x1 = int( + "427838232691226969392843410947554224151809796397784248136826" + "78720006717057747" + ) + y1 = int( + "463168356949264781694283940034751631413079938662562256157830" + "33603165251855960" + ) + p = PointEdwards(curve_ed25519, x1, y1, 1, x1 * y1) + + pk = PublicKey(generator_ed25519, b"0" * 32, public_point=p) + + assert pk.public_point() == p + + +def test_ed25519_mul_to_order_min_1(): + x1 = int( + "427838232691226969392843410947554224151809796397784248136826" + "78720006717057747" + ) + y1 = int( + "463168356949264781694283940034751631413079938662562256157830" + "33603165251855960" + ) + inf_m_1 = PointEdwards(curve_ed25519, x1, y1, 1, x1 * y1) + + assert generator_ed25519 * (generator_ed25519.order() - 1) == inf_m_1 + + +def test_ed25519_mul_to_infinity(): + assert generator_ed25519 * generator_ed25519.order() == INFINITY + + +def test_ed25519_mul_to_infinity_plus_1(): + g = 
generator_ed25519 + assert g * (g.order() + 1) == g + + +def test_ed25519_mul_and_add(): + g = generator_ed25519 + a = g * 128 + b = g * 64 + g * 64 + + assert a == b + + +def test_ed25519_mul_and_add_2(): + g = generator_ed25519 + + a = g * 123 + b = g * 120 + g * 3 + + assert a == b + + +def test_ed25519_mul_infinity(): + inf = PointEdwards(curve_ed25519, 0, 1, 1, 0) + + z = inf * 11 + + assert z == INFINITY + + +def test_ed25519_mul_by_zero(): + z = generator_ed25519 * 0 + + assert z == INFINITY + + +def test_ed25519_mul_by_one(): + z = generator_ed25519 * 1 + + assert z == generator_ed25519 + + +def test_ed25519_mul_custom_point(): + # verify that multiplication without order set works + + g = generator_ed25519 + + a = PointEdwards(curve_ed25519, g.x(), g.y(), 1, g.x() * g.y()) + + z = a * 11 + + assert z == g * 11 + + +def test_ed25519_pickle(): + g = generator_ed25519 + assert pickle.loads(pickle.dumps(g)) == g + + +def test_ed448_eq_against_different_curve(): + assert generator_ed25519 != generator_ed448 + + +def test_ed448_double(): + g = generator_ed448 + z = g.double() + + assert isinstance(z, PointEdwards) + + x2 = int( + "4845591495304045936995492052586696895690942404582120401876" + "6013278705691214670908136440114445572635086627683154494739" + "7859048262938744149" + ) + y2 = int( + "4940887598674337276743026725267350893505445523037277237461" + "2648447308771911703729389009346215770388834286503647778745" + "3078312060500281069" + ) + + b = PointEdwards(curve_ed448, x2, y2, 1, x2 * y2) + + assert z == b + assert g != b + + +def test_ed448_add_as_double(): + g = generator_ed448 + z = g + g + + b = g.double() + + assert z == b + + +def test_ed448_mul_as_double(): + g = generator_ed448 + z = g * 2 + b = g.double() + + assert z == b + + +def test_ed448_add_to_infinity(): + # generator * (order - 1) + x1 = int( + "5022586839996825903617194737881084981068517190547539260353" + "6473749366191269932473977736719082931859264751085238669719" + "1187378895383117729" + ) + y1 = int( + "2988192100784814926760179304439306734375440401540802420959" + "2824137233150618983587600353687865541878473398230323350346" + "2500531545062832660" + ) + inf_m_1 = PointEdwards(curve_ed448, x1, y1, 1, x1 * y1) + + inf = inf_m_1 + generator_ed448 + + assert inf is INFINITY + + +def test_ed448_mul_to_infinity(): + g = generator_ed448 + inf = g * g.order() + + assert inf is INFINITY + + +def test_ed448_mul_to_infinity_plus_1(): + g = generator_ed448 + + z = g * (g.order() + 1) + + assert z == g + + +def test_ed448_add_and_mul_equivalence(): + g = generator_ed448 + + assert g + g == g * 2 + assert g + g + g == g * 3 + + +def test_ed25519_encode(): + g = generator_ed25519 + g_bytes = g.to_bytes() + assert len(g_bytes) == 32 + exp_bytes = ( + b"\x58\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66" + b"\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66" + ) + assert g_bytes == exp_bytes + + +def test_ed25519_decode(): + exp_bytes = ( + b"\x58\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66" + b"\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66" + ) + a = PointEdwards.from_bytes(curve_ed25519, exp_bytes) + + assert a == generator_ed25519 + + +class TestEdwardsMalformed(unittest.TestCase): + def test_invalid_point(self): + exp_bytes = ( + b"\x78\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66" + b"\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66" + ) + with self.assertRaises(MalformedPointError): + PointEdwards.from_bytes(curve_ed25519, 
exp_bytes) + + def test_invalid_length(self): + exp_bytes = ( + b"\x58\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66" + b"\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66" + b"\x66" + ) + with self.assertRaises(MalformedPointError) as e: + PointEdwards.from_bytes(curve_ed25519, exp_bytes) + + self.assertIn("length", str(e.exception)) + + def test_ed448_invalid(self): + exp_bytes = b"\xff" * 57 + with self.assertRaises(MalformedPointError): + PointEdwards.from_bytes(curve_ed448, exp_bytes) + + +def test_ed448_encode(): + g = generator_ed448 + g_bytes = g.to_bytes() + assert len(g_bytes) == 57 + exp_bytes = ( + b"\x14\xfa\x30\xf2\x5b\x79\x08\x98\xad\xc8\xd7\x4e\x2c\x13\xbd" + b"\xfd\xc4\x39\x7c\xe6\x1c\xff\xd3\x3a\xd7\xc2\xa0\x05\x1e\x9c" + b"\x78\x87\x40\x98\xa3\x6c\x73\x73\xea\x4b\x62\xc7\xc9\x56\x37" + b"\x20\x76\x88\x24\xbc\xb6\x6e\x71\x46\x3f\x69\x00" + ) + assert g_bytes == exp_bytes + + +def test_ed448_decode(): + exp_bytes = ( + b"\x14\xfa\x30\xf2\x5b\x79\x08\x98\xad\xc8\xd7\x4e\x2c\x13\xbd" + b"\xfd\xc4\x39\x7c\xe6\x1c\xff\xd3\x3a\xd7\xc2\xa0\x05\x1e\x9c" + b"\x78\x87\x40\x98\xa3\x6c\x73\x73\xea\x4b\x62\xc7\xc9\x56\x37" + b"\x20\x76\x88\x24\xbc\xb6\x6e\x71\x46\x3f\x69\x00" + ) + + a = PointEdwards.from_bytes(curve_ed448, exp_bytes) + + assert a == generator_ed448 + + +class TestEdDSAEquality(unittest.TestCase): + def test_equal_public_points(self): + key1 = PublicKey(generator_ed25519, b"\x01" * 32) + key2 = PublicKey(generator_ed25519, b"\x01" * 32) + + self.assertEqual(key1, key2) + # verify that `__ne__` works as expected + self.assertFalse(key1 != key2) + + def test_unequal_public_points(self): + key1 = PublicKey(generator_ed25519, b"\x01" * 32) + key2 = PublicKey(generator_ed25519, b"\x03" * 32) + + self.assertNotEqual(key1, key2) + + def test_unequal_to_string(self): + key1 = PublicKey(generator_ed25519, b"\x01" * 32) + key2 = b"\x01" * 32 + + self.assertNotEqual(key1, key2) + + def test_unequal_publickey_curves(self): + key1 = PublicKey(generator_ed25519, b"\x01" * 32) + key2 = PublicKey(generator_ed448, b"\x03" * 56 + b"\x00") + + self.assertNotEqual(key1, key2) + # verify that `__ne__` works as expected + self.assertTrue(key1 != key2) + + def test_equal_private_keys(self): + key1 = PrivateKey(generator_ed25519, b"\x01" * 32) + key2 = PrivateKey(generator_ed25519, b"\x01" * 32) + + self.assertEqual(key1, key2) + # verify that `__ne__` works as expected + self.assertFalse(key1 != key2) + + def test_unequal_private_keys(self): + key1 = PrivateKey(generator_ed25519, b"\x01" * 32) + key2 = PrivateKey(generator_ed25519, b"\x02" * 32) + + self.assertNotEqual(key1, key2) + # verify that `__ne__` works as expected + self.assertTrue(key1 != key2) + + def test_unequal_privatekey_to_string(self): + key1 = PrivateKey(generator_ed25519, b"\x01" * 32) + key2 = b"\x01" * 32 + + self.assertNotEqual(key1, key2) + + def test_unequal_privatekey_curves(self): + key1 = PrivateKey(generator_ed25519, b"\x01" * 32) + key2 = PrivateKey(generator_ed448, b"\x01" * 57) + + self.assertNotEqual(key1, key2) + + +class TestInvalidEdDSAInputs(unittest.TestCase): + def test_wrong_length_of_private_key(self): + with self.assertRaises(ValueError): + PrivateKey(generator_ed25519, b"\x01" * 31) + + def test_wrong_length_of_public_key(self): + with self.assertRaises(ValueError): + PublicKey(generator_ed25519, b"\x01" * 33) + + def test_wrong_cofactor_curve(self): + ed_c = curve_ed25519 + + def _hash(data): + return hashlib.new("sha512", compat26_str(data)).digest() + + curve = 
CurveEdTw(ed_c.p(), ed_c.a(), ed_c.d(), 1, _hash) + g = generator_ed25519 + fake_gen = PointEdwards(curve, g.x(), g.y(), 1, g.x() * g.y()) + + with self.assertRaises(ValueError) as e: + PrivateKey(fake_gen, g.to_bytes()) + + self.assertIn("cofactor", str(e.exception)) + + def test_invalid_signature_length(self): + key = PublicKey(generator_ed25519, b"\x01" * 32) + + with self.assertRaises(ValueError) as e: + key.verify(b"", b"\x01" * 65) + + self.assertIn("length", str(e.exception)) + + def test_changing_public_key(self): + key = PublicKey(generator_ed25519, b"\x01" * 32) + + g = key.point + + new_g = PointEdwards(curve_ed25519, g.x(), g.y(), 1, g.x() * g.y()) + + key.point = new_g + + self.assertEqual(g, key.point) + + def test_changing_public_key_to_different_point(self): + key = PublicKey(generator_ed25519, b"\x01" * 32) + + with self.assertRaises(ValueError) as e: + key.point = generator_ed25519 + + self.assertIn("coordinates", str(e.exception)) + + def test_invalid_s_value(self): + key = PublicKey( + generator_ed25519, + b"\xd7\x5a\x98\x01\x82\xb1\x0a\xb7\xd5\x4b\xfe\xd3\xc9\x64\x07\x3a" + b"\x0e\xe1\x72\xf3\xda\xa6\x23\x25\xaf\x02\x1a\x68\xf7\x07\x51\x1a", + ) + sig_valid = bytearray( + b"\xe5\x56\x43\x00\xc3\x60\xac\x72\x90\x86\xe2\xcc\x80\x6e\x82\x8a" + b"\x84\x87\x7f\x1e\xb8\xe5\xd9\x74\xd8\x73\xe0\x65\x22\x49\x01\x55" + b"\x5f\xb8\x82\x15\x90\xa3\x3b\xac\xc6\x1e\x39\x70\x1c\xf9\xb4\x6b" + b"\xd2\x5b\xf5\xf0\x59\x5b\xbe\x24\x65\x51\x41\x43\x8e\x7a\x10\x0b" + ) + + self.assertTrue(key.verify(b"", sig_valid)) + + sig_invalid = bytearray(sig_valid) + sig_invalid[-1] = 0xFF + + with self.assertRaises(ValueError): + key.verify(b"", sig_invalid) + + def test_invalid_r_value(self): + key = PublicKey( + generator_ed25519, + b"\xd7\x5a\x98\x01\x82\xb1\x0a\xb7\xd5\x4b\xfe\xd3\xc9\x64\x07\x3a" + b"\x0e\xe1\x72\xf3\xda\xa6\x23\x25\xaf\x02\x1a\x68\xf7\x07\x51\x1a", + ) + sig_valid = bytearray( + b"\xe5\x56\x43\x00\xc3\x60\xac\x72\x90\x86\xe2\xcc\x80\x6e\x82\x8a" + b"\x84\x87\x7f\x1e\xb8\xe5\xd9\x74\xd8\x73\xe0\x65\x22\x49\x01\x55" + b"\x5f\xb8\x82\x15\x90\xa3\x3b\xac\xc6\x1e\x39\x70\x1c\xf9\xb4\x6b" + b"\xd2\x5b\xf5\xf0\x59\x5b\xbe\x24\x65\x51\x41\x43\x8e\x7a\x10\x0b" + ) + + self.assertTrue(key.verify(b"", sig_valid)) + + sig_invalid = bytearray(sig_valid) + sig_invalid[0] = 0xE0 + + with self.assertRaises(ValueError): + key.verify(b"", sig_invalid) + + +HYP_SETTINGS = dict() +if "--fast" in sys.argv: # pragma: no cover + HYP_SETTINGS["max_examples"] = 2 +else: + HYP_SETTINGS["max_examples"] = 10 + + +@settings(**HYP_SETTINGS) +@example(1) +@example(5) # smallest multiple that requires changing sign of x +@given(st.integers(min_value=1, max_value=int(generator_ed25519.order() - 1))) +def test_ed25519_encode_decode(multiple): + a = generator_ed25519 * multiple + + b = PointEdwards.from_bytes(curve_ed25519, a.to_bytes()) + + assert a == b + + +@settings(**HYP_SETTINGS) +@example(1) +@example(2) # smallest multiple that requires changing the sign of x +@given(st.integers(min_value=1, max_value=int(generator_ed448.order() - 1))) +def test_ed448_encode_decode(multiple): + a = generator_ed448 * multiple + + b = PointEdwards.from_bytes(curve_ed448, a.to_bytes()) + + assert a == b + + +@settings(**HYP_SETTINGS) +@example(1) +@example(2) +@given(st.integers(min_value=1, max_value=int(generator_ed25519.order()) - 1)) +def test_ed25519_mul_precompute_vs_naf(multiple): + """Compare multiplication with and without precomputation.""" + g = generator_ed25519 + new_g = PointEdwards(curve_ed25519, g.x(), 
g.y(), 1, g.x() * g.y()) + + assert g * multiple == multiple * new_g + + +# Test vectors from RFC 8032 +TEST_VECTORS = [ + # TEST 1 + ( + generator_ed25519, + "9d61b19deffd5a60ba844af492ec2cc4" "4449c5697b326919703bac031cae7f60", + "d75a980182b10ab7d54bfed3c964073a" "0ee172f3daa62325af021a68f707511a", + "", + "e5564300c360ac729086e2cc806e828a" + "84877f1eb8e5d974d873e06522490155" + "5fb8821590a33bacc61e39701cf9b46b" + "d25bf5f0595bbe24655141438e7a100b", + ), + # TEST 2 + ( + generator_ed25519, + "4ccd089b28ff96da9db6c346ec114e0f" "5b8a319f35aba624da8cf6ed4fb8a6fb", + "3d4017c3e843895a92b70aa74d1b7ebc" "9c982ccf2ec4968cc0cd55f12af4660c", + "72", + "92a009a9f0d4cab8720e820b5f642540" + "a2b27b5416503f8fb3762223ebdb69da" + "085ac1e43e15996e458f3613d0f11d8c" + "387b2eaeb4302aeeb00d291612bb0c00", + ), + # TEST 3 + ( + generator_ed25519, + "c5aa8df43f9f837bedb7442f31dcb7b1" "66d38535076f094b85ce3a2e0b4458f7", + "fc51cd8e6218a1a38da47ed00230f058" "0816ed13ba3303ac5deb911548908025", + "af82", + "6291d657deec24024827e69c3abe01a3" + "0ce548a284743a445e3680d7db5ac3ac" + "18ff9b538d16f290ae67f760984dc659" + "4a7c15e9716ed28dc027beceea1ec40a", + ), + # TEST 1024 + ( + generator_ed25519, + "f5e5767cf153319517630f226876b86c" "8160cc583bc013744c6bf255f5cc0ee5", + "278117fc144c72340f67d0f2316e8386" "ceffbf2b2428c9c51fef7c597f1d426e", + "08b8b2b733424243760fe426a4b54908" + "632110a66c2f6591eabd3345e3e4eb98" + "fa6e264bf09efe12ee50f8f54e9f77b1" + "e355f6c50544e23fb1433ddf73be84d8" + "79de7c0046dc4996d9e773f4bc9efe57" + "38829adb26c81b37c93a1b270b20329d" + "658675fc6ea534e0810a4432826bf58c" + "941efb65d57a338bbd2e26640f89ffbc" + "1a858efcb8550ee3a5e1998bd177e93a" + "7363c344fe6b199ee5d02e82d522c4fe" + "ba15452f80288a821a579116ec6dad2b" + "3b310da903401aa62100ab5d1a36553e" + "06203b33890cc9b832f79ef80560ccb9" + "a39ce767967ed628c6ad573cb116dbef" + "efd75499da96bd68a8a97b928a8bbc10" + "3b6621fcde2beca1231d206be6cd9ec7" + "aff6f6c94fcd7204ed3455c68c83f4a4" + "1da4af2b74ef5c53f1d8ac70bdcb7ed1" + "85ce81bd84359d44254d95629e9855a9" + "4a7c1958d1f8ada5d0532ed8a5aa3fb2" + "d17ba70eb6248e594e1a2297acbbb39d" + "502f1a8c6eb6f1ce22b3de1a1f40cc24" + "554119a831a9aad6079cad88425de6bd" + "e1a9187ebb6092cf67bf2b13fd65f270" + "88d78b7e883c8759d2c4f5c65adb7553" + "878ad575f9fad878e80a0c9ba63bcbcc" + "2732e69485bbc9c90bfbd62481d9089b" + "eccf80cfe2df16a2cf65bd92dd597b07" + "07e0917af48bbb75fed413d238f5555a" + "7a569d80c3414a8d0859dc65a46128ba" + "b27af87a71314f318c782b23ebfe808b" + "82b0ce26401d2e22f04d83d1255dc51a" + "ddd3b75a2b1ae0784504df543af8969b" + "e3ea7082ff7fc9888c144da2af58429e" + "c96031dbcad3dad9af0dcbaaaf268cb8" + "fcffead94f3c7ca495e056a9b47acdb7" + "51fb73e666c6c655ade8297297d07ad1" + "ba5e43f1bca32301651339e22904cc8c" + "42f58c30c04aafdb038dda0847dd988d" + "cda6f3bfd15c4b4c4525004aa06eeff8" + "ca61783aacec57fb3d1f92b0fe2fd1a8" + "5f6724517b65e614ad6808d6f6ee34df" + "f7310fdc82aebfd904b01e1dc54b2927" + "094b2db68d6f903b68401adebf5a7e08" + "d78ff4ef5d63653a65040cf9bfd4aca7" + "984a74d37145986780fc0b16ac451649" + "de6188a7dbdf191f64b5fc5e2ab47b57" + "f7f7276cd419c17a3ca8e1b939ae49e4" + "88acba6b965610b5480109c8b17b80e1" + "b7b750dfc7598d5d5011fd2dcc5600a3" + "2ef5b52a1ecc820e308aa342721aac09" + "43bf6686b64b2579376504ccc493d97e" + "6aed3fb0f9cd71a43dd497f01f17c0e2" + "cb3797aa2a2f256656168e6c496afc5f" + "b93246f6b1116398a346f1a641f3b041" + "e989f7914f90cc2c7fff357876e506b5" + "0d334ba77c225bc307ba537152f3f161" + "0e4eafe595f6d9d90d11faa933a15ef1" + "369546868a7f3a45a96768d40fd9d034" + 
"12c091c6315cf4fde7cb68606937380d" + "b2eaaa707b4c4185c32eddcdd306705e" + "4dc1ffc872eeee475a64dfac86aba41c" + "0618983f8741c5ef68d3a101e8a3b8ca" + "c60c905c15fc910840b94c00a0b9d0", + "0aab4c900501b3e24d7cdf4663326a3a" + "87df5e4843b2cbdb67cbf6e460fec350" + "aa5371b1508f9f4528ecea23c436d94b" + "5e8fcd4f681e30a6ac00a9704a188a03", + ), + # TEST SHA(abc) + ( + generator_ed25519, + "833fe62409237b9d62ec77587520911e" "9a759cec1d19755b7da901b96dca3d42", + "ec172b93ad5e563bf4932c70e1245034" "c35467ef2efd4d64ebf819683467e2bf", + "ddaf35a193617abacc417349ae204131" + "12e6fa4e89a97ea20a9eeee64b55d39a" + "2192992a274fc1a836ba3c23a3feebbd" + "454d4423643ce80e2a9ac94fa54ca49f", + "dc2a4459e7369633a52b1bf277839a00" + "201009a3efbf3ecb69bea2186c26b589" + "09351fc9ac90b3ecfdfbc7c66431e030" + "3dca179c138ac17ad9bef1177331a704", + ), + # Blank + ( + generator_ed448, + "6c82a562cb808d10d632be89c8513ebf" + "6c929f34ddfa8c9f63c9960ef6e348a3" + "528c8a3fcc2f044e39a3fc5b94492f8f" + "032e7549a20098f95b", + "5fd7449b59b461fd2ce787ec616ad46a" + "1da1342485a70e1f8a0ea75d80e96778" + "edf124769b46c7061bd6783df1e50f6c" + "d1fa1abeafe8256180", + "", + "533a37f6bbe457251f023c0d88f976ae" + "2dfb504a843e34d2074fd823d41a591f" + "2b233f034f628281f2fd7a22ddd47d78" + "28c59bd0a21bfd3980ff0d2028d4b18a" + "9df63e006c5d1c2d345b925d8dc00b41" + "04852db99ac5c7cdda8530a113a0f4db" + "b61149f05a7363268c71d95808ff2e65" + "2600", + ), + # 1 octet + ( + generator_ed448, + "c4eab05d357007c632f3dbb48489924d" + "552b08fe0c353a0d4a1f00acda2c463a" + "fbea67c5e8d2877c5e3bc397a659949e" + "f8021e954e0a12274e", + "43ba28f430cdff456ae531545f7ecd0a" + "c834a55d9358c0372bfa0c6c6798c086" + "6aea01eb00742802b8438ea4cb82169c" + "235160627b4c3a9480", + "03", + "26b8f91727bd62897af15e41eb43c377" + "efb9c610d48f2335cb0bd0087810f435" + "2541b143c4b981b7e18f62de8ccdf633" + "fc1bf037ab7cd779805e0dbcc0aae1cb" + "cee1afb2e027df36bc04dcecbf154336" + "c19f0af7e0a6472905e799f1953d2a0f" + "f3348ab21aa4adafd1d234441cf807c0" + "3a00", + ), + # 11 octets + ( + generator_ed448, + "cd23d24f714274e744343237b93290f5" + "11f6425f98e64459ff203e8985083ffd" + "f60500553abc0e05cd02184bdb89c4cc" + "d67e187951267eb328", + "dcea9e78f35a1bf3499a831b10b86c90" + "aac01cd84b67a0109b55a36e9328b1e3" + "65fce161d71ce7131a543ea4cb5f7e9f" + "1d8b00696447001400", + "0c3e544074ec63b0265e0c", + "1f0a8888ce25e8d458a21130879b840a" + "9089d999aaba039eaf3e3afa090a09d3" + "89dba82c4ff2ae8ac5cdfb7c55e94d5d" + "961a29fe0109941e00b8dbdeea6d3b05" + "1068df7254c0cdc129cbe62db2dc957d" + "bb47b51fd3f213fb8698f064774250a5" + "028961c9bf8ffd973fe5d5c206492b14" + "0e00", + ), + # 12 octets + ( + generator_ed448, + "258cdd4ada32ed9c9ff54e63756ae582" + "fb8fab2ac721f2c8e676a72768513d93" + "9f63dddb55609133f29adf86ec9929dc" + "cb52c1c5fd2ff7e21b", + "3ba16da0c6f2cc1f30187740756f5e79" + "8d6bc5fc015d7c63cc9510ee3fd44adc" + "24d8e968b6e46e6f94d19b945361726b" + "d75e149ef09817f580", + "64a65f3cdedcdd66811e2915", + "7eeeab7c4e50fb799b418ee5e3197ff6" + "bf15d43a14c34389b59dd1a7b1b85b4a" + "e90438aca634bea45e3a2695f1270f07" + "fdcdf7c62b8efeaf00b45c2c96ba457e" + "b1a8bf075a3db28e5c24f6b923ed4ad7" + "47c3c9e03c7079efb87cb110d3a99861" + "e72003cbae6d6b8b827e4e6c143064ff" + "3c00", + ), + # 13 octets + ( + generator_ed448, + "7ef4e84544236752fbb56b8f31a23a10" + "e42814f5f55ca037cdcc11c64c9a3b29" + "49c1bb60700314611732a6c2fea98eeb" + "c0266a11a93970100e", + "b3da079b0aa493a5772029f0467baebe" + "e5a8112d9d3a22532361da294f7bb381" + "5c5dc59e176b4d9f381ca0938e13c6c0" + "7b174be65dfa578e80", + 
"64a65f3cdedcdd66811e2915e7", + "6a12066f55331b6c22acd5d5bfc5d712" + "28fbda80ae8dec26bdd306743c5027cb" + "4890810c162c027468675ecf645a8317" + "6c0d7323a2ccde2d80efe5a1268e8aca" + "1d6fbc194d3f77c44986eb4ab4177919" + "ad8bec33eb47bbb5fc6e28196fd1caf5" + "6b4e7e0ba5519234d047155ac727a105" + "3100", + ), + # 64 octets + ( + generator_ed448, + "d65df341ad13e008567688baedda8e9d" + "cdc17dc024974ea5b4227b6530e339bf" + "f21f99e68ca6968f3cca6dfe0fb9f4fa" + "b4fa135d5542ea3f01", + "df9705f58edbab802c7f8363cfe5560a" + "b1c6132c20a9f1dd163483a26f8ac53a" + "39d6808bf4a1dfbd261b099bb03b3fb5" + "0906cb28bd8a081f00", + "bd0f6a3747cd561bdddf4640a332461a" + "4a30a12a434cd0bf40d766d9c6d458e5" + "512204a30c17d1f50b5079631f64eb31" + "12182da3005835461113718d1a5ef944", + "554bc2480860b49eab8532d2a533b7d5" + "78ef473eeb58c98bb2d0e1ce488a98b1" + "8dfde9b9b90775e67f47d4a1c3482058" + "efc9f40d2ca033a0801b63d45b3b722e" + "f552bad3b4ccb667da350192b61c508c" + "f7b6b5adadc2c8d9a446ef003fb05cba" + "5f30e88e36ec2703b349ca229c267083" + "3900", + ), + # 256 octets + ( + generator_ed448, + "2ec5fe3c17045abdb136a5e6a913e32a" + "b75ae68b53d2fc149b77e504132d3756" + "9b7e766ba74a19bd6162343a21c8590a" + "a9cebca9014c636df5", + "79756f014dcfe2079f5dd9e718be4171" + "e2ef2486a08f25186f6bff43a9936b9b" + "fe12402b08ae65798a3d81e22e9ec80e" + "7690862ef3d4ed3a00", + "15777532b0bdd0d1389f636c5f6b9ba7" + "34c90af572877e2d272dd078aa1e567c" + "fa80e12928bb542330e8409f31745041" + "07ecd5efac61ae7504dabe2a602ede89" + "e5cca6257a7c77e27a702b3ae39fc769" + "fc54f2395ae6a1178cab4738e543072f" + "c1c177fe71e92e25bf03e4ecb72f47b6" + "4d0465aaea4c7fad372536c8ba516a60" + "39c3c2a39f0e4d832be432dfa9a706a6" + "e5c7e19f397964ca4258002f7c0541b5" + "90316dbc5622b6b2a6fe7a4abffd9610" + "5eca76ea7b98816af0748c10df048ce0" + "12d901015a51f189f3888145c03650aa" + "23ce894c3bd889e030d565071c59f409" + "a9981b51878fd6fc110624dcbcde0bf7" + "a69ccce38fabdf86f3bef6044819de11", + "c650ddbb0601c19ca11439e1640dd931" + "f43c518ea5bea70d3dcde5f4191fe53f" + "00cf966546b72bcc7d58be2b9badef28" + "743954e3a44a23f880e8d4f1cfce2d7a" + "61452d26da05896f0a50da66a239a8a1" + "88b6d825b3305ad77b73fbac0836ecc6" + "0987fd08527c1a8e80d5823e65cafe2a" + "3d00", + ), + # 1023 octets + ( + generator_ed448, + "872d093780f5d3730df7c212664b37b8" + "a0f24f56810daa8382cd4fa3f77634ec" + "44dc54f1c2ed9bea86fafb7632d8be19" + "9ea165f5ad55dd9ce8", + "a81b2e8a70a5ac94ffdbcc9badfc3feb" + "0801f258578bb114ad44ece1ec0e799d" + "a08effb81c5d685c0c56f64eecaef8cd" + "f11cc38737838cf400", + "6ddf802e1aae4986935f7f981ba3f035" + "1d6273c0a0c22c9c0e8339168e675412" + "a3debfaf435ed651558007db4384b650" + "fcc07e3b586a27a4f7a00ac8a6fec2cd" + "86ae4bf1570c41e6a40c931db27b2faa" + "15a8cedd52cff7362c4e6e23daec0fbc" + "3a79b6806e316efcc7b68119bf46bc76" + "a26067a53f296dafdbdc11c77f7777e9" + "72660cf4b6a9b369a6665f02e0cc9b6e" + "dfad136b4fabe723d2813db3136cfde9" + "b6d044322fee2947952e031b73ab5c60" + "3349b307bdc27bc6cb8b8bbd7bd32321" + "9b8033a581b59eadebb09b3c4f3d2277" + "d4f0343624acc817804728b25ab79717" + "2b4c5c21a22f9c7839d64300232eb66e" + "53f31c723fa37fe387c7d3e50bdf9813" + "a30e5bb12cf4cd930c40cfb4e1fc6225" + "92a49588794494d56d24ea4b40c89fc0" + "596cc9ebb961c8cb10adde976a5d602b" + "1c3f85b9b9a001ed3c6a4d3b1437f520" + "96cd1956d042a597d561a596ecd3d173" + "5a8d570ea0ec27225a2c4aaff26306d1" + "526c1af3ca6d9cf5a2c98f47e1c46db9" + "a33234cfd4d81f2c98538a09ebe76998" + "d0d8fd25997c7d255c6d66ece6fa56f1" + "1144950f027795e653008f4bd7ca2dee" + "85d8e90f3dc315130ce2a00375a318c7" + "c3d97be2c8ce5b6db41a6254ff264fa6" 
+ "155baee3b0773c0f497c573f19bb4f42" + "40281f0b1f4f7be857a4e59d416c06b4" + "c50fa09e1810ddc6b1467baeac5a3668" + "d11b6ecaa901440016f389f80acc4db9" + "77025e7f5924388c7e340a732e554440" + "e76570f8dd71b7d640b3450d1fd5f041" + "0a18f9a3494f707c717b79b4bf75c984" + "00b096b21653b5d217cf3565c9597456" + "f70703497a078763829bc01bb1cbc8fa" + "04eadc9a6e3f6699587a9e75c94e5bab" + "0036e0b2e711392cff0047d0d6b05bd2" + "a588bc109718954259f1d86678a579a3" + "120f19cfb2963f177aeb70f2d4844826" + "262e51b80271272068ef5b3856fa8535" + "aa2a88b2d41f2a0e2fda7624c2850272" + "ac4a2f561f8f2f7a318bfd5caf969614" + "9e4ac824ad3460538fdc25421beec2cc" + "6818162d06bbed0c40a387192349db67" + "a118bada6cd5ab0140ee273204f628aa" + "d1c135f770279a651e24d8c14d75a605" + "9d76b96a6fd857def5e0b354b27ab937" + "a5815d16b5fae407ff18222c6d1ed263" + "be68c95f32d908bd895cd76207ae7264" + "87567f9a67dad79abec316f683b17f2d" + "02bf07e0ac8b5bc6162cf94697b3c27c" + "d1fea49b27f23ba2901871962506520c" + "392da8b6ad0d99f7013fbc06c2c17a56" + "9500c8a7696481c1cd33e9b14e40b82e" + "79a5f5db82571ba97bae3ad3e0479515" + "bb0e2b0f3bfcd1fd33034efc6245eddd" + "7ee2086ddae2600d8ca73e214e8c2b0b" + "db2b047c6a464a562ed77b73d2d841c4" + "b34973551257713b753632efba348169" + "abc90a68f42611a40126d7cb21b58695" + "568186f7e569d2ff0f9e745d0487dd2e" + "b997cafc5abf9dd102e62ff66cba87", + "e301345a41a39a4d72fff8df69c98075" + "a0cc082b802fc9b2b6bc503f926b65bd" + "df7f4c8f1cb49f6396afc8a70abe6d8a" + "ef0db478d4c6b2970076c6a0484fe76d" + "76b3a97625d79f1ce240e7c576750d29" + "5528286f719b413de9ada3e8eb78ed57" + "3603ce30d8bb761785dc30dbc320869e" + "1a00", + ), +] + + +@pytest.mark.parametrize( + "generator,private_key,public_key,message,signature", + TEST_VECTORS, +) +def test_vectors(generator, private_key, public_key, message, signature): + private_key = a2b_hex(private_key) + public_key = a2b_hex(public_key) + message = a2b_hex(message) + signature = a2b_hex(signature) + + sig_key = PrivateKey(generator, private_key) + ver_key = PublicKey(generator, public_key) + + assert sig_key.public_key().public_key() == ver_key.public_key() + + gen_sig = sig_key.sign(message) + + assert gen_sig == signature + + assert ver_key.verify(message, signature) diff --git a/src/ecdsa/test_ellipticcurve.py b/src/ecdsa/test_ellipticcurve.py index def53b2a..864cf108 100644 --- a/src/ecdsa/test_ellipticcurve.py +++ b/src/ecdsa/test_ellipticcurve.py @@ -1,5 +1,4 @@ import pytest -from six import print_ try: import unittest2 as unittest @@ -15,7 +14,7 @@ except ImportError: # pragma: no cover HC_PRESENT = False from .numbertheory import inverse_mod -from .ellipticcurve import CurveFp, INFINITY, Point +from .ellipticcurve import CurveFp, INFINITY, Point, CurveEdTw HYP_SETTINGS = {} @@ -41,11 +40,11 @@ HYP_SLOW_SETTINGS = dict(HYP_SETTINGS) -HYP_SLOW_SETTINGS["max_examples"] = 10 +HYP_SLOW_SETTINGS["max_examples"] = 2 @settings(**HYP_SLOW_SETTINGS) -@given(st.integers(min_value=1, max_value=r + 1)) +@given(st.integers(min_value=1, max_value=r - 1)) def test_p192_mult_tests(multiple): inv_m = inverse_mod(multiple, r) @@ -84,6 +83,11 @@ def test_inequality_curves(self): c192 = CurveFp(p, -3, b) self.assertNotEqual(self.c_23, c192) + def test_inequality_curves_by_b_only(self): + a = CurveFp(23, 1, 0) + b = CurveFp(23, 1, 1) + self.assertNotEqual(a, b) + def test_usability_in_a_hashed_collection_curves(self): {self.c_23: None} @@ -96,7 +100,33 @@ def test_conflation_curves(self): self.assertEqual(len(set((c_23, eq1, eq2, eq3))), 1) self.assertEqual(len(set((c_23, ne1, ne2, ne3))), 4) 
self.assertDictEqual({c_23: None}, {eq1: None}) - self.assertTrue(eq2 in {eq3: None}) + self.assertIn(eq2, {eq3: None}) + + def test___str__(self): + self.assertEqual(str(self.c_23), "CurveFp(p=23, a=1, b=1)") + + def test___str___with_cofactor(self): + c = CurveFp(23, 1, 1, 4) + self.assertEqual(str(c), "CurveFp(p=23, a=1, b=1, h=4)") + + +class TestCurveEdTw(unittest.TestCase): + @classmethod + def setUpClass(cls): + cls.c_23 = CurveEdTw(23, 1, 1) + + def test___str__(self): + self.assertEqual(str(self.c_23), "CurveEdTw(p=23, a=1, d=1)") + + def test___str___with_cofactor(self): + c = CurveEdTw(23, 1, 1, 4) + self.assertEqual(str(c), "CurveEdTw(p=23, a=1, d=1, h=4)") + + def test_usability_in_a_hashed_collection_curves(self): + {self.c_23: None} + + def test_hashability_curves(self): + hash(self.c_23) class TestPoint(unittest.TestCase): @@ -159,6 +189,33 @@ def test_double(self): self.assertEqual(p3.x(), x3) self.assertEqual(p3.y(), y3) + def test_double_to_infinity(self): + p1 = Point(self.c_23, 11, 20) + p2 = p1.double() + self.assertEqual((p2.x(), p2.y()), (4, 0)) + self.assertNotEqual(p2, INFINITY) + p3 = p2.double() + self.assertEqual(p3, INFINITY) + self.assertIs(p3, INFINITY) + + def test_add_self_to_infinity(self): + p1 = Point(self.c_23, 11, 20) + p2 = p1 + p1 + self.assertEqual((p2.x(), p2.y()), (4, 0)) + self.assertNotEqual(p2, INFINITY) + p3 = p2 + p2 + self.assertEqual(p3, INFINITY) + self.assertIs(p3, INFINITY) + + def test_mul_to_infinity(self): + p1 = Point(self.c_23, 11, 20) + p2 = p1 * 2 + self.assertEqual((p2.x(), p2.y()), (4, 0)) + self.assertNotEqual(p2, INFINITY) + p3 = p2 * 2 + self.assertEqual(p3, INFINITY) + self.assertIs(p3, INFINITY) + def test_multiply(self): x1, y1, m, x3, y3 = (3, 10, 2, 7, 12) p1 = Point(self.c_23, x1, y1) @@ -198,3 +255,40 @@ def test_inequality_points(self): def test_inequality_points_diff_types(self): c = CurveFp(100, -3, 100) self.assertNotEqual(self.g_23, c) + + def test_inequality_diff_y(self): + p1 = Point(self.c_23, 6, 4) + p2 = Point(self.c_23, 6, 19) + + self.assertNotEqual(p1, p2) + + def test_to_bytes_from_bytes(self): + p = Point(self.c_23, 3, 10) + + self.assertEqual(p, Point.from_bytes(self.c_23, p.to_bytes())) + + def test_add_to_neg_self(self): + p = Point(self.c_23, 3, 10) + + self.assertEqual(INFINITY, p + (-p)) + + def test_add_to_infinity(self): + p = Point(self.c_23, 3, 10) + + self.assertIs(p, p + INFINITY) + + def test_mul_infinity_by_scalar(self): + self.assertIs(INFINITY, INFINITY * 10) + + def test_mul_by_negative(self): + p = Point(self.c_23, 3, 10) + + self.assertEqual(p * -5, (-p) * 5) + + def test_str_infinity(self): + self.assertEqual(str(INFINITY), "infinity") + + def test_str_point(self): + p = Point(self.c_23, 3, 10) + + self.assertEqual(str(p), "(3,10)") diff --git a/src/ecdsa/test_jacobi.py b/src/ecdsa/test_jacobi.py index 5494243d..f811b922 100644 --- a/src/ecdsa/test_jacobi.py +++ b/src/ecdsa/test_jacobi.py @@ -1,16 +1,43 @@ import pickle +import sys try: import unittest2 as unittest except ImportError: import unittest +import os +import signal +import pytest +import threading +import platform import hypothesis.strategies as st from hypothesis import given, assume, settings, example -from .ellipticcurve import CurveFp, Point, PointJacobi, INFINITY -from .ecdsa import generator_256, curve_256, generator_224 +from .ellipticcurve import CurveFp, PointJacobi, INFINITY, Point +from .ecdsa import ( + generator_256, + curve_256, + generator_224, + generator_brainpoolp160r1, + curve_brainpoolp160r1, + 
generator_112r2, + curve_112r2, +) from .numbertheory import inverse_mod +from .util import randrange + + +NO_OLD_SETTINGS = {} +if sys.version_info > (2, 7): # pragma: no branch + NO_OLD_SETTINGS["deadline"] = 5000 + + +SLOW_SETTINGS = {} +if "--fast" in sys.argv: # pragma: no cover + SLOW_SETTINGS["max_examples"] = 2 +else: + SLOW_SETTINGS["max_examples"] = 10 class TestJacobi(unittest.TestCase): @@ -31,7 +58,7 @@ def test_add_with_different_curves(self): p_a = PointJacobi.from_affine(generator_256) p_b = PointJacobi.from_affine(generator_224) - with self.assertRaises(ValueError): + with self.assertRaises(ValueError): # pragma: no branch p_a + p_b def test_compare_different_curves(self): @@ -65,14 +92,21 @@ def test_double_with_zero_point(self): self.assertIs(pj, INFINITY) def test_double_with_zero_equivalent_point(self): - pj = PointJacobi(curve_256, 0, curve_256.p(), 1) + pj = PointJacobi(curve_256, 0, 0, 0) + + pj = pj.double() + + self.assertIs(pj, INFINITY) + + def test_double_with_zero_equivalent_point_non_zero_z_non_zero_y(self): + pj = PointJacobi(curve_256, 0, 1, curve_256.p()) pj = pj.double() self.assertIs(pj, INFINITY) - def test_double_with_zero_equivalent_point_non_1_z(self): - pj = PointJacobi(curve_256, 0, curve_256.p(), 2) + def test_double_with_zero_equivalent_point_non_zero_z(self): + pj = PointJacobi(curve_256, 0, 0, curve_256.p()) pj = pj.double() @@ -86,7 +120,7 @@ def test_compare_with_affine_point(self): self.assertEqual(pa, pj) def test_to_affine_with_zero_point(self): - pj = PointJacobi(curve_256, 0, 0, 1) + pj = PointJacobi(curve_256, 0, 0, 0) pa = pj.to_affine() @@ -117,7 +151,7 @@ def test_add_with_infinity(self): def test_add_zero_point_to_affine(self): pa = PointJacobi.from_affine(generator_256).to_affine() - pj = PointJacobi(curve_256, 0, 0, 1) + pj = PointJacobi(curve_256, 0, 0, 0) s = pj + pa @@ -168,8 +202,35 @@ def test_compare_non_zero_with_infinity(self): self.assertNotEqual(pj, INFINITY) + def test_compare_non_zero_bad_scale_with_infinity(self): + pj = PointJacobi(curve_256, 1, 1, 0) + self.assertEqual(pj, INFINITY) + + def test_eq_x_0_on_curve_with_infinity(self): + c_23 = CurveFp(23, 1, 1) + pj = PointJacobi(c_23, 0, 1, 1) + + self.assertTrue(c_23.contains_point(0, 1)) + + self.assertNotEqual(pj, INFINITY) + + def test_eq_y_0_on_curve_with_infinity(self): + c_23 = CurveFp(23, 1, 1) + pj = PointJacobi(c_23, 4, 0, 1) + + self.assertTrue(c_23.contains_point(4, 0)) + + self.assertNotEqual(pj, INFINITY) + + def test_eq_with_same_x_different_y(self): + c_23 = CurveFp(23, 1, 1) + p_a = PointJacobi(c_23, 0, 22, 1) + p_b = PointJacobi(c_23, 0, 1, 1) + + self.assertNotEqual(p_a, p_b) + def test_compare_zero_point_with_infinity(self): - pj = PointJacobi(curve_256, 0, 0, 1) + pj = PointJacobi(curve_256, 0, 0, 0) self.assertEqual(pj, INFINITY) @@ -180,10 +241,14 @@ def test_compare_double_with_multiply(self): self.assertEqual(dbl, mlpl) - @settings(max_examples=10) - @given(st.integers(min_value=0, max_value=int(generator_256.order()))) + @settings(**SLOW_SETTINGS) + @given( + st.integers( + min_value=0, max_value=int(generator_brainpoolp160r1.order() - 1) + ) + ) def test_multiplications(self, mul): - pj = PointJacobi.from_affine(generator_256) + pj = PointJacobi.from_affine(generator_brainpoolp160r1) pw = pj.to_affine() * mul pj = pj * mul @@ -191,27 +256,36 @@ def test_multiplications(self, mul): self.assertEqual((pj.x(), pj.y()), (pw.x(), pw.y())) self.assertEqual(pj, pw) - @settings(max_examples=10) - @given(st.integers(min_value=0, 
max_value=int(generator_256.order()))) + @settings(**SLOW_SETTINGS) + @given( + st.integers( + min_value=0, max_value=int(generator_brainpoolp160r1.order() - 1) + ) + ) @example(0) - @example(int(generator_256.order())) + @example(int(generator_brainpoolp160r1.order())) def test_precompute(self, mul): - precomp = PointJacobi.from_affine(generator_256, True) - pj = PointJacobi.from_affine(generator_256) + precomp = generator_brainpoolp160r1 + self.assertTrue(precomp._PointJacobi__precompute) + pj = PointJacobi.from_affine(generator_brainpoolp160r1) a = precomp * mul b = pj * mul self.assertEqual(a, b) - @settings(max_examples=10) + @settings(**SLOW_SETTINGS) @given( - st.integers(min_value=1, max_value=int(generator_256.order())), - st.integers(min_value=1, max_value=int(generator_256.order())), + st.integers( + min_value=1, max_value=int(generator_brainpoolp160r1.order() - 1) + ), + st.integers( + min_value=1, max_value=int(generator_brainpoolp160r1.order() - 1) + ), ) @example(3, 3) def test_add_scaled_points(self, a_mul, b_mul): - j_g = PointJacobi.from_affine(generator_256) + j_g = PointJacobi.from_affine(generator_brainpoolp160r1) a = PointJacobi.from_affine(j_g * a_mul) b = PointJacobi.from_affine(j_g * b_mul) @@ -219,69 +293,117 @@ def test_add_scaled_points(self, a_mul, b_mul): self.assertEqual(c, j_g * (a_mul + b_mul)) - @settings(max_examples=10) + @settings(**SLOW_SETTINGS) @given( - st.integers(min_value=1, max_value=int(generator_256.order())), - st.integers(min_value=1, max_value=int(generator_256.order())), - st.integers(min_value=1, max_value=int(curve_256.p() - 1)), + st.integers( + min_value=1, max_value=int(generator_brainpoolp160r1.order() - 1) + ), + st.integers( + min_value=1, max_value=int(generator_brainpoolp160r1.order() - 1) + ), + st.integers(min_value=1, max_value=int(curve_brainpoolp160r1.p() - 1)), ) def test_add_one_scaled_point(self, a_mul, b_mul, new_z): - j_g = PointJacobi.from_affine(generator_256) + j_g = PointJacobi.from_affine(generator_brainpoolp160r1) a = PointJacobi.from_affine(j_g * a_mul) b = PointJacobi.from_affine(j_g * b_mul) - p = curve_256.p() + p = curve_brainpoolp160r1.p() assume(inverse_mod(new_z, p)) new_zz = new_z * new_z % p b = PointJacobi( - curve_256, b.x() * new_zz % p, b.y() * new_zz * new_z % p, new_z + curve_brainpoolp160r1, + b.x() * new_zz % p, + b.y() * new_zz * new_z % p, + new_z, ) c = a + b self.assertEqual(c, j_g * (a_mul + b_mul)) - @settings(max_examples=10) + @pytest.mark.slow + @settings(**SLOW_SETTINGS) @given( - st.integers(min_value=1, max_value=int(generator_256.order())), - st.integers(min_value=1, max_value=int(generator_256.order())), - st.integers(min_value=1, max_value=int(curve_256.p() - 1)), + st.integers( + min_value=1, max_value=int(generator_brainpoolp160r1.order() - 1) + ), + st.integers( + min_value=1, max_value=int(generator_brainpoolp160r1.order() - 1) + ), + st.integers(min_value=1, max_value=int(curve_brainpoolp160r1.p() - 1)), ) @example(1, 1, 1) @example(3, 3, 3) - @example(2, int(generator_256.order() - 2), 1) - @example(2, int(generator_256.order() - 2), 3) + @example(2, int(generator_brainpoolp160r1.order() - 2), 1) + @example(2, int(generator_brainpoolp160r1.order() - 2), 3) def test_add_same_scale_points(self, a_mul, b_mul, new_z): - j_g = PointJacobi.from_affine(generator_256) + j_g = PointJacobi.from_affine(generator_brainpoolp160r1) a = PointJacobi.from_affine(j_g * a_mul) b = PointJacobi.from_affine(j_g * b_mul) - p = curve_256.p() + p = curve_brainpoolp160r1.p() assume(inverse_mod(new_z, 
p)) new_zz = new_z * new_z % p a = PointJacobi( - curve_256, a.x() * new_zz % p, a.y() * new_zz * new_z % p, new_z + curve_brainpoolp160r1, + a.x() * new_zz % p, + a.y() * new_zz * new_z % p, + new_z, ) b = PointJacobi( - curve_256, b.x() * new_zz % p, b.y() * new_zz * new_z % p, new_z + curve_brainpoolp160r1, + b.x() * new_zz % p, + b.y() * new_zz * new_z % p, + new_z, ) c = a + b self.assertEqual(c, j_g * (a_mul + b_mul)) - @settings(max_examples=14) + def test_add_same_scale_points_static(self): + j_g = generator_brainpoolp160r1 + p = curve_brainpoolp160r1.p() + a = j_g * 11 + a.scale() + z1 = 13 + x = PointJacobi( + curve_brainpoolp160r1, + a.x() * z1**2 % p, + a.y() * z1**3 % p, + z1, + ) + y = PointJacobi( + curve_brainpoolp160r1, + a.x() * z1**2 % p, + a.y() * z1**3 % p, + z1, + ) + + c = a + a + + self.assertEqual(c, x + y) + + @pytest.mark.slow + @settings(**SLOW_SETTINGS) @given( - st.integers(min_value=1, max_value=int(generator_256.order())), - st.integers(min_value=1, max_value=int(generator_256.order())), + st.integers( + min_value=1, max_value=int(generator_brainpoolp160r1.order() - 1) + ), + st.integers( + min_value=1, max_value=int(generator_brainpoolp160r1.order() - 1) + ), st.lists( - st.integers(min_value=1, max_value=int(curve_256.p() - 1)), + st.integers( + min_value=1, max_value=int(curve_brainpoolp160r1.p() - 1) + ), min_size=2, max_size=2, unique=True, @@ -289,14 +411,14 @@ def test_add_same_scale_points(self, a_mul, b_mul, new_z): ) @example(2, 2, [2, 1]) @example(2, 2, [2, 3]) - @example(2, int(generator_256.order() - 2), [2, 3]) - @example(2, int(generator_256.order() - 2), [2, 1]) + @example(2, int(generator_brainpoolp160r1.order() - 2), [2, 3]) + @example(2, int(generator_brainpoolp160r1.order() - 2), [2, 1]) def test_add_different_scale_points(self, a_mul, b_mul, new_z): - j_g = PointJacobi.from_affine(generator_256) + j_g = PointJacobi.from_affine(generator_brainpoolp160r1) a = PointJacobi.from_affine(j_g * a_mul) b = PointJacobi.from_affine(j_g * b_mul) - p = curve_256.p() + p = curve_brainpoolp160r1.p() assume(inverse_mod(new_z[0], p)) assume(inverse_mod(new_z[1], p)) @@ -305,13 +427,13 @@ def test_add_different_scale_points(self, a_mul, b_mul, new_z): new_zz1 = new_z[1] * new_z[1] % p a = PointJacobi( - curve_256, + curve_brainpoolp160r1, a.x() * new_zz0 % p, a.y() * new_zz0 * new_z[0] % p, new_z[0], ) b = PointJacobi( - curve_256, + curve_brainpoolp160r1, b.x() * new_zz1 % p, b.y() * new_zz1 * new_z[1] % p, new_z[1], @@ -321,11 +443,112 @@ def test_add_different_scale_points(self, a_mul, b_mul, new_z): self.assertEqual(c, j_g * (a_mul + b_mul)) + def test_add_different_scale_points_static(self): + j_g = generator_brainpoolp160r1 + p = curve_brainpoolp160r1.p() + a = j_g * 11 + a.scale() + z1 = 13 + x = PointJacobi( + curve_brainpoolp160r1, + a.x() * z1**2 % p, + a.y() * z1**3 % p, + z1, + ) + z2 = 29 + y = PointJacobi( + curve_brainpoolp160r1, + a.x() * z2**2 % p, + a.y() * z2**3 % p, + z2, + ) + + c = a + a + + self.assertEqual(c, x + y) + + def test_add_different_points_same_scale_static(self): + j_g = generator_brainpoolp160r1 + p = curve_brainpoolp160r1.p() + a = j_g * 11 + a.scale() + b = j_g * 12 + z = 13 + x = PointJacobi( + curve_brainpoolp160r1, + a.x() * z**2 % p, + a.y() * z**3 % p, + z, + ) + y = PointJacobi( + curve_brainpoolp160r1, + b.x() * z**2 % p, + b.y() * z**3 % p, + z, + ) + + c = a + b + + self.assertEqual(c, x + y) + + def test_add_same_point_different_scale_second_z_1_static(self): + j_g = generator_112r2 + p = curve_112r2.p() + z 
= 11 + a = j_g * z + a.scale() + + x = PointJacobi( + curve_112r2, + a.x() * z**2 % p, + a.y() * z**3 % p, + z, + ) + y = PointJacobi( + curve_112r2, + a.x(), + a.y(), + 1, + ) + + c = a + a + + self.assertEqual(c, x + y) + + def test_add_to_infinity_static(self): + j_g = generator_112r2 + + z = 11 + a = j_g * z + a.scale() + + b = -a + + x = PointJacobi( + curve_112r2, + a.x(), + a.y(), + 1, + ) + y = PointJacobi( + curve_112r2, + b.x(), + b.y(), + 1, + ) + + self.assertEqual(INFINITY, x + y) + def test_add_point_3_times(self): j_g = PointJacobi.from_affine(generator_256) self.assertEqual(j_g * 3, j_g + j_g + j_g) + def test_mul_without_order(self): + j_g = PointJacobi(curve_256, generator_256.x(), generator_256.y(), 1) + + self.assertEqual(j_g * generator_256.order(), INFINITY) + def test_mul_add_inf(self): j_g = PointJacobi.from_affine(generator_256) @@ -337,7 +560,7 @@ def test_mul_add_same(self): self.assertEqual(j_g * 2, j_g.mul_add(1, j_g, 1)) def test_mul_add_precompute(self): - j_g = PointJacobi.from_affine(generator_256, True) + j_g = PointJacobi.from_affine(generator_brainpoolp160r1, True) b = PointJacobi.from_affine(j_g * 255, True) self.assertEqual(j_g * 256, j_g + b) @@ -345,7 +568,7 @@ def test_mul_add_precompute(self): self.assertEqual(j_g * (5 + 255 * 7), j_g.mul_add(5, b, 7)) def test_mul_add_precompute_large(self): - j_g = PointJacobi.from_affine(generator_256, True) + j_g = PointJacobi.from_affine(generator_brainpoolp160r1, True) b = PointJacobi.from_affine(j_g * 255, True) self.assertEqual(j_g * 256, j_g + b) @@ -364,6 +587,21 @@ def test_mul_add_to_mul(self): self.assertEqual(a, b) + def test_mul_add_differnt(self): + j_g = PointJacobi.from_affine(generator_256) + + w_a = j_g * 2 + + self.assertEqual(j_g.mul_add(1, w_a, 1), j_g * 3) + + def test_mul_add_slightly_different(self): + j_g = PointJacobi.from_affine(generator_256) + + w_a = j_g * 2 + w_b = j_g * 3 + + self.assertEqual(w_a.mul_add(1, w_b, 3), w_a * 1 + w_b * 3) + def test_mul_add(self): j_g = PointJacobi.from_affine(generator_256) @@ -375,6 +613,18 @@ def test_mul_add(self): self.assertEqual(ret.to_affine(), w_a + w_b) + def test_mul_add_zero(self): + j_g = PointJacobi.from_affine(generator_256) + + w_a = generator_256 * 255 + w_b = generator_256 * (0 * 0xA8) + + j_b = j_g * 0xA8 + + ret = j_g.mul_add(255, j_b, 0) + + self.assertEqual(ret.to_affine(), w_a + w_b) + def test_mul_add_large(self): j_g = PointJacobi.from_affine(generator_256) b = PointJacobi.from_affine(j_g * 255) @@ -387,11 +637,298 @@ def test_mul_add_large(self): j_g * (0xFF00 + 255 * 0xF0F0), j_g.mul_add(0xFF00, b, 0xF0F0) ) + def test_mul_add_with_infinity_as_result(self): + j_g = PointJacobi.from_affine(generator_256) + + order = generator_256.order() + + b = PointJacobi.from_affine(generator_256 * 256) + + self.assertEqual(j_g.mul_add(order % 256, b, order // 256), INFINITY) + + def test_mul_add_without_order(self): + j_g = PointJacobi(curve_256, generator_256.x(), generator_256.y(), 1) + + order = generator_256.order() + + w_b = generator_256 * 34 + w_b.scale() + + b = PointJacobi(curve_256, w_b.x(), w_b.y(), 1) + + self.assertEqual(j_g.mul_add(order % 34, b, order // 34), INFINITY) + + def test_mul_add_with_doubled_negation_of_itself(self): + j_g = PointJacobi.from_affine(generator_256 * 17) + + dbl_neg = 2 * (-j_g) + + self.assertEqual(j_g.mul_add(4, dbl_neg, 2), INFINITY) + + @given( + st.integers(min_value=0, max_value=int(generator_112r2.order() - 1)), + st.integers(min_value=0, max_value=int(generator_112r2.order() - 1)), + 
st.integers(min_value=0, max_value=int(generator_112r2.order() - 1)), + ) + @example(693, 2, 3293) # values that will hit all the conditions for NAF + def test_mul_add_random(self, mul1, mul2, mul3): + p_a = PointJacobi.from_affine(generator_112r2) + p_b = generator_112r2 * mul2 + + res = p_a.mul_add(mul1, p_b, mul3) + + self.assertEqual(res, p_a * mul1 + p_b * mul3) + def test_equality(self): pj1 = PointJacobi(curve=CurveFp(23, 1, 1, 1), x=2, y=3, z=1, order=1) pj2 = PointJacobi(curve=CurveFp(23, 1, 1, 1), x=2, y=3, z=1, order=1) self.assertEqual(pj1, pj2) + def test_equality_with_invalid_object(self): + j_g = PointJacobi.from_affine(generator_256) + + self.assertNotEqual(j_g, 12) + + def test_equality_with_wrong_curves(self): + p_a = PointJacobi.from_affine(generator_256) + p_b = PointJacobi.from_affine(generator_224) + + self.assertNotEqual(p_a, p_b) + + def test_add_with_point_at_infinity(self): + pj1 = PointJacobi(curve=CurveFp(23, 1, 1, 1), x=2, y=3, z=1, order=1) + x, y, z = pj1._add(2, 3, 1, 5, 5, 0, 23) + + self.assertEqual((x, y, z), (2, 3, 1)) + + def test_double_to_infinity(self): + c_23 = CurveFp(23, 1, 1) + p = PointJacobi(c_23, 11, 20, 1) + p2 = p.double() + self.assertEqual((p2.x(), p2.y()), (4, 0)) + self.assertNotEqual(p2, INFINITY) + p3 = p2.double() + self.assertEqual(p3, INFINITY) + self.assertIs(p3, INFINITY) + + def test_double_to_x_0(self): + c_23_2 = CurveFp(23, 1, 2) + p = PointJacobi(c_23_2, 9, 2, 1) + p2 = p.double() + + self.assertEqual((p2.x(), p2.y()), (0, 18)) + + def test_mul_to_infinity(self): + c_23 = CurveFp(23, 1, 1) + p = PointJacobi(c_23, 11, 20, 1) + p2 = p * 2 + self.assertEqual((p2.x(), p2.y()), (4, 0)) + self.assertNotEqual(p2, INFINITY) + p3 = p2 * 2 + self.assertEqual(p3, INFINITY) + self.assertIs(p3, INFINITY) + + def test_add_to_infinity(self): + c_23 = CurveFp(23, 1, 1) + p = PointJacobi(c_23, 11, 20, 1) + p2 = p + p + self.assertEqual((p2.x(), p2.y()), (4, 0)) + self.assertNotEqual(p2, INFINITY) + p3 = p2 + p2 + self.assertEqual(p3, INFINITY) + self.assertIs(p3, INFINITY) + + def test_mul_to_x_0(self): + c_23 = CurveFp(23, 1, 1) + p = PointJacobi(c_23, 9, 7, 1) + + p2 = p * 13 + self.assertEqual((p2.x(), p2.y()), (0, 22)) + + def test_mul_to_y_0(self): + c_23 = CurveFp(23, 1, 1) + p = PointJacobi(c_23, 9, 7, 1) + + p2 = p * 14 + self.assertEqual((p2.x(), p2.y()), (4, 0)) + + def test_add_to_x_0(self): + c_23 = CurveFp(23, 1, 1) + p = PointJacobi(c_23, 9, 7, 1) + + p2 = p * 12 + p + self.assertEqual((p2.x(), p2.y()), (0, 22)) + + def test_add_to_y_0(self): + c_23 = CurveFp(23, 1, 1) + p = PointJacobi(c_23, 9, 7, 1) + + p2 = p * 13 + p + self.assertEqual((p2.x(), p2.y()), (4, 0)) + + def test_add_diff_z_to_infinity(self): + c_23 = CurveFp(23, 1, 1) + p = PointJacobi(c_23, 9, 7, 1) + + c = p * 20 + p * 8 + self.assertIs(c, INFINITY) + def test_pickle(self): pj = PointJacobi(curve=CurveFp(23, 1, 1, 1), x=2, y=3, z=1, order=1) self.assertEqual(pickle.loads(pickle.dumps(pj)), pj) + + @pytest.mark.slow + @settings(**NO_OLD_SETTINGS) + @pytest.mark.skipif( + platform.python_implementation() == "PyPy", + reason="threading on PyPy breaks coverage", + ) + @given(st.integers(min_value=1, max_value=10)) + def test_multithreading(self, thread_num): # pragma: no cover + # ensure that generator's precomputation table is filled + generator_112r2 * 2 + + # create a fresh point that doesn't have a filled precomputation table + gen = generator_112r2 + gen = PointJacobi(gen.curve(), gen.x(), gen.y(), 1, gen.order(), True) + + 
self.assertEqual(gen._PointJacobi__precompute, []) + + def runner(generator): + order = generator.order() + for _ in range(10): + generator * randrange(order) + + threads = [] + for _ in range(thread_num): + threads.append(threading.Thread(target=runner, args=(gen,))) + + for t in threads: + t.start() + + runner(gen) + + for t in threads: + t.join() + + self.assertEqual( + gen._PointJacobi__precompute, + generator_112r2._PointJacobi__precompute, + ) + + @pytest.mark.slow + @pytest.mark.skipif( + platform.system() == "Windows" + or platform.python_implementation() == "PyPy", + reason="there are no signals on Windows, and threading breaks coverage" + " on PyPy", + ) + def test_multithreading_with_interrupts(self): # pragma: no cover + thread_num = 10 + # ensure that generator's precomputation table is filled + generator_112r2 * 2 + + # create a fresh point that doesn't have a filled precomputation table + gen = generator_112r2 + gen = PointJacobi(gen.curve(), gen.x(), gen.y(), 1, gen.order(), True) + + self.assertEqual(gen._PointJacobi__precompute, []) + + def runner(generator): + order = generator.order() + for _ in range(50): + generator * randrange(order) + + def interrupter(barrier_start, barrier_end, lock_exit): + # wait until MainThread can handle KeyboardInterrupt + barrier_start.release() + barrier_end.acquire() + os.kill(os.getpid(), signal.SIGINT) + lock_exit.release() + + threads = [] + for _ in range(thread_num): + threads.append(threading.Thread(target=runner, args=(gen,))) + + barrier_start = threading.Lock() + barrier_start.acquire() + barrier_end = threading.Lock() + barrier_end.acquire() + lock_exit = threading.Lock() + lock_exit.acquire() + + threads.append( + threading.Thread( + target=interrupter, + args=(barrier_start, barrier_end, lock_exit), + ) + ) + + for t in threads: + t.start() + + with self.assertRaises(KeyboardInterrupt): + # signal to interrupter that we can now handle the signal + barrier_start.acquire() + barrier_end.release() + runner(gen) + # use the lock to ensure we never go past the scope of + # assertRaises before the os.kill is called + lock_exit.acquire() + + for t in threads: + t.join() + + self.assertEqual( + gen._PointJacobi__precompute, + generator_112r2._PointJacobi__precompute, + ) + + +class TestZeroCurve(unittest.TestCase): + """Tests with curve that has (0, 0) on the curve.""" + + def setUp(self): + self.curve = CurveFp(23, 1, 0) + + def test_zero_point_on_curve(self): + self.assertTrue(self.curve.contains_point(0, 0)) + + def test_double_to_0_0_point(self): + p = PointJacobi(self.curve, 1, 18, 1) + + d = p.double() + + self.assertNotEqual(d, INFINITY) + self.assertEqual((0, 0), (d.x(), d.y())) + + def test_double_to_0_0_point_with_non_one_z(self): + z = 2 + p = PointJacobi(self.curve, 1 * z**2, 18 * z**3, z) + + d = p.double() + + self.assertNotEqual(d, INFINITY) + self.assertEqual((0, 0), (d.x(), d.y())) + + def test_mul_to_0_0_point(self): + p = PointJacobi(self.curve, 11, 13, 1) + + d = p * 12 + + self.assertNotEqual(d, INFINITY) + self.assertEqual((0, 0), (d.x(), d.y())) + + def test_double_of_0_0_point(self): + p = PointJacobi(self.curve, 0, 0, 1) + + d = p.double() + + self.assertIs(d, INFINITY) + + def test_compare_to_old_implementation(self): + p = PointJacobi(self.curve, 11, 13, 1) + p_c = Point(self.curve, 11, 13) + + for i in range(24): + self.assertEqual(p * i, p_c * i) diff --git a/src/ecdsa/test_keys.py b/src/ecdsa/test_keys.py index 82176c9d..348475e2 100644 --- a/src/ecdsa/test_keys.py +++ b/src/ecdsa/test_keys.py @@ -8,14 
+8,26 @@ except NameError: buffer = memoryview +import os import array -import six -import sys import pytest import hashlib -from .keys import VerifyingKey, SigningKey -from .der import unpem +from .keys import ( + VerifyingKey, + SigningKey, + MalformedPointError, + BadSignatureError, +) +from .der import ( + unpem, + UnexpectedDER, + encode_sequence, + encode_oid, + encode_bitstring, + encode_integer, + encode_octet_string, +) from .util import ( sigencode_string, sigencode_der, @@ -24,7 +36,9 @@ sigdecode_der, sigdecode_strings, ) -from .curves import NIST256p +from .curves import NIST256p, Curve, BRAINPOOLP160r1, Ed25519, Ed448 +from .ellipticcurve import Point, PointJacobi, CurveFp, INFINITY +from .ecdsa import generator_brainpoolp160r1 class TestVerifyingKeyFromString(unittest.TestCase): @@ -113,6 +127,10 @@ def test_bytearray_compressed(self): self.assertEqual(self.vk.to_string(), vk.to_string()) + def test_ed25519_VerifyingKey_from_string_imported(self): + with self.assertRaises(MalformedPointError): + VerifyingKey.from_string(b"AAA", Ed25519) + class TestVerifyingKeyFromDer(unittest.TestCase): """ @@ -150,6 +168,55 @@ def setUpClass(cls): ) cls.vk2 = VerifyingKey.from_pem(key_str) + cls.sk2 = SigningKey.generate(vk.curve) + + def test_load_key_with_explicit_parameters(self): + pub_key_str = ( + "-----BEGIN PUBLIC KEY-----\n" + "MIIBSzCCAQMGByqGSM49AgEwgfcCAQEwLAYHKoZIzj0BAQIhAP////8AAAABAAAA\n" + "AAAAAAAAAAAA////////////////MFsEIP////8AAAABAAAAAAAAAAAAAAAA////\n" + "///////////8BCBaxjXYqjqT57PrvVV2mIa8ZR0GsMxTsPY7zjw+J9JgSwMVAMSd\n" + "NgiG5wSTamZ44ROdJreBn36QBEEEaxfR8uEsQkf4vOblY6RA8ncDfYEt6zOg9KE5\n" + "RdiYwpZP40Li/hp/m47n60p8D54WK84zV2sxXs7LtkBoN79R9QIhAP////8AAAAA\n" + "//////////+85vqtpxeehPO5ysL8YyVRAgEBA0IABIr1UkgYs5jmbFc7it1/YI2X\n" + "T//IlaEjMNZft1owjqpBYH2ErJHk4U5Pp4WvWq1xmHwIZlsH7Ig4KmefCfR6SmU=\n" + "-----END PUBLIC KEY-----" + ) + pk = VerifyingKey.from_pem(pub_key_str) + + pk_exp = VerifyingKey.from_string( + b"\x04\x8a\xf5\x52\x48\x18\xb3\x98\xe6\x6c\x57\x3b\x8a\xdd\x7f" + b"\x60\x8d\x97\x4f\xff\xc8\x95\xa1\x23\x30\xd6\x5f\xb7\x5a\x30" + b"\x8e\xaa\x41\x60\x7d\x84\xac\x91\xe4\xe1\x4e\x4f\xa7\x85\xaf" + b"\x5a\xad\x71\x98\x7c\x08\x66\x5b\x07\xec\x88\x38\x2a\x67\x9f" + b"\x09\xf4\x7a\x4a\x65", + curve=NIST256p, + ) + self.assertEqual(pk, pk_exp) + + def test_load_key_with_explicit_with_explicit_disabled(self): + pub_key_str = ( + "-----BEGIN PUBLIC KEY-----\n" + "MIIBSzCCAQMGByqGSM49AgEwgfcCAQEwLAYHKoZIzj0BAQIhAP////8AAAABAAAA\n" + "AAAAAAAAAAAA////////////////MFsEIP////8AAAABAAAAAAAAAAAAAAAA////\n" + "///////////8BCBaxjXYqjqT57PrvVV2mIa8ZR0GsMxTsPY7zjw+J9JgSwMVAMSd\n" + "NgiG5wSTamZ44ROdJreBn36QBEEEaxfR8uEsQkf4vOblY6RA8ncDfYEt6zOg9KE5\n" + "RdiYwpZP40Li/hp/m47n60p8D54WK84zV2sxXs7LtkBoN79R9QIhAP////8AAAAA\n" + "//////////+85vqtpxeehPO5ysL8YyVRAgEBA0IABIr1UkgYs5jmbFc7it1/YI2X\n" + "T//IlaEjMNZft1owjqpBYH2ErJHk4U5Pp4WvWq1xmHwIZlsH7Ig4KmefCfR6SmU=\n" + "-----END PUBLIC KEY-----" + ) + with self.assertRaises(UnexpectedDER): + VerifyingKey.from_pem( + pub_key_str, valid_curve_encodings=["named_curve"] + ) + + def test_load_key_with_disabled_format(self): + with self.assertRaises(MalformedPointError) as e: + VerifyingKey.from_der(self.key_bytes, valid_encodings=["raw"]) + + self.assertIn("enabled (raw) encodings", str(e.exception)) + def test_custom_hashfunc(self): vk = VerifyingKey.from_der(self.key_bytes, hashlib.sha256) @@ -193,13 +260,259 @@ def test_array_array_of_bytes_memoryview(self): self.assertEqual(self.vk.to_string(), vk.to_string()) def 
test_equality_on_verifying_keys(self): - self.assertEqual(self.vk, self.sk.get_verifying_key()) + self.assertTrue(self.vk == self.sk.get_verifying_key()) def test_inequality_on_verifying_keys(self): - self.assertNotEqual(self.vk, self.vk2) + self.assertFalse(self.vk == self.vk2) def test_inequality_on_verifying_keys_not_implemented(self): - self.assertNotEqual(self.vk, None) + self.assertFalse(self.vk == None) + + def test_VerifyingKey_inequality_on_same_curve(self): + self.assertNotEqual(self.vk, self.sk2.verifying_key) + + def test_SigningKey_inequality_on_same_curve(self): + self.assertNotEqual(self.sk, self.sk2) + + def test_inequality_on_wrong_types(self): + self.assertFalse(self.vk == self.sk) + + def test_from_public_point_old(self): + pj = self.vk.pubkey.point + point = Point(pj.curve(), pj.x(), pj.y()) + + vk = VerifyingKey.from_public_point(point, self.vk.curve) + + self.assertTrue(vk == self.vk) + + def test_ed25519_VerifyingKey_repr__(self): + sk = SigningKey.from_string(Ed25519.generator.to_bytes(), Ed25519) + string = repr(sk.verifying_key) + + self.assertEqual( + "VerifyingKey.from_string(" + "bytearray(b'K\\x0c\\xfbZH\\x8e\\x8c\\x8c\\x07\\xee\\xda\\xfb" + "\\xe1\\x97\\xcd\\x90\\x18\\x02\\x15h]\\xfe\\xbe\\xcbB\\xba\\xe6r" + "\\x10\\xae\\xf1P'), Ed25519, None)", + string, + ) + + def test_edwards_from_public_point(self): + point = Ed25519.generator + with self.assertRaises(ValueError) as e: + VerifyingKey.from_public_point(point, Ed25519) + + self.assertIn("incompatible with Edwards", str(e.exception)) + + def test_edwards_precompute_no_side_effect(self): + sk = SigningKey.from_string(Ed25519.generator.to_bytes(), Ed25519) + vk = sk.verifying_key + vk2 = VerifyingKey.from_string(vk.to_string(), Ed25519) + vk.precompute() + + self.assertEqual(vk, vk2) + + def test_parse_malfomed_eddsa_der_pubkey(self): + der_str = encode_sequence( + encode_sequence(encode_oid(*Ed25519.oid)), + encode_bitstring(bytes(Ed25519.generator.to_bytes()), 0), + encode_bitstring(b"\x00", 0), + ) + + with self.assertRaises(UnexpectedDER) as e: + VerifyingKey.from_der(der_str) + + self.assertIn("trailing junk after public key", str(e.exception)) + + def test_edwards_from_public_key_recovery(self): + with self.assertRaises(ValueError) as e: + VerifyingKey.from_public_key_recovery(b"", b"", Ed25519) + + self.assertIn("unsupported for Edwards", str(e.exception)) + + def test_edwards_from_public_key_recovery_with_digest(self): + with self.assertRaises(ValueError) as e: + VerifyingKey.from_public_key_recovery_with_digest( + b"", b"", Ed25519 + ) + + self.assertIn("unsupported for Edwards", str(e.exception)) + + def test_load_ed25519_from_pem(self): + vk_pem = ( + "-----BEGIN PUBLIC KEY-----\n" + "MCowBQYDK2VwAyEAIwBQ0NZkIiiO41WJfm5BV42u3kQm7lYnvIXmCy8qy2U=\n" + "-----END PUBLIC KEY-----\n" + ) + + vk = VerifyingKey.from_pem(vk_pem) + + self.assertIsInstance(vk.curve, Curve) + self.assertIs(vk.curve, Ed25519) + + vk_str = ( + b"\x23\x00\x50\xd0\xd6\x64\x22\x28\x8e\xe3\x55\x89\x7e\x6e\x41\x57" + b"\x8d\xae\xde\x44\x26\xee\x56\x27\xbc\x85\xe6\x0b\x2f\x2a\xcb\x65" + ) + + vk_2 = VerifyingKey.from_string(vk_str, Ed25519) + + self.assertEqual(vk, vk_2) + + def test_export_ed255_to_pem(self): + vk_str = ( + b"\x23\x00\x50\xd0\xd6\x64\x22\x28\x8e\xe3\x55\x89\x7e\x6e\x41\x57" + b"\x8d\xae\xde\x44\x26\xee\x56\x27\xbc\x85\xe6\x0b\x2f\x2a\xcb\x65" + ) + + vk = VerifyingKey.from_string(vk_str, Ed25519) + + vk_pem = ( + b"-----BEGIN PUBLIC KEY-----\n" + b"MCowBQYDK2VwAyEAIwBQ0NZkIiiO41WJfm5BV42u3kQm7lYnvIXmCy8qy2U=\n" + 
b"-----END PUBLIC KEY-----\n" + ) + + self.assertEqual(vk_pem, vk.to_pem()) + + def test_export_ed255_to_ssh(self): + vk_str = ( + b"\x23\x00\x50\xd0\xd6\x64\x22\x28\x8e\xe3\x55\x89\x7e\x6e\x41\x57" + b"\x8d\xae\xde\x44\x26\xee\x56\x27\xbc\x85\xe6\x0b\x2f\x2a\xcb\x65" + ) + + vk = VerifyingKey.from_string(vk_str, Ed25519) + + vk_ssh = b"ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAICMAUNDWZCIojuNViX5uQVeNrt5EJu5WJ7yF5gsvKstl\n" + + self.assertEqual(vk_ssh, vk.to_ssh()) + + def test_ed25519_export_import(self): + sk = SigningKey.generate(Ed25519) + vk = sk.verifying_key + + vk2 = VerifyingKey.from_pem(vk.to_pem()) + + self.assertEqual(vk, vk2) + + def test_ed25519_sig_verify(self): + vk_pem = ( + "-----BEGIN PUBLIC KEY-----\n" + "MCowBQYDK2VwAyEAIwBQ0NZkIiiO41WJfm5BV42u3kQm7lYnvIXmCy8qy2U=\n" + "-----END PUBLIC KEY-----\n" + ) + + vk = VerifyingKey.from_pem(vk_pem) + + data = b"data\n" + + # signature created by OpenSSL 3.0.0 beta1 + sig = ( + b"\x64\x47\xab\x6a\x33\xcd\x79\x45\xad\x98\x11\x6c\xb9\xf2\x20\xeb" + b"\x90\xd6\x50\xe3\xc7\x8f\x9f\x60\x10\xec\x75\xe0\x2f\x27\xd3\x96" + b"\xda\xe8\x58\x7f\xe0\xfe\x46\x5c\x81\xef\x50\xec\x29\x9f\xae\xd5" + b"\xad\x46\x3c\x91\x68\x83\x4d\xea\x8d\xa8\x19\x04\x04\x79\x03\x0b" + ) + + self.assertTrue(vk.verify(sig, data)) + + def test_ed25519_sig_verify_malformed(self): + vk_pem = ( + "-----BEGIN PUBLIC KEY-----\n" + "MCowBQYDK2VwAyEAIwBQ0NZkIiiO41WJfm5BV42u3kQm7lYnvIXmCy8qy2U=\n" + "-----END PUBLIC KEY-----\n" + ) + + vk = VerifyingKey.from_pem(vk_pem) + + data = b"data\n" + + # modified signature from test_ed25519_sig_verify + sig = ( + b"\xAA\x47\xab\x6a\x33\xcd\x79\x45\xad\x98\x11\x6c\xb9\xf2\x20\xeb" + b"\x90\xd6\x50\xe3\xc7\x8f\x9f\x60\x10\xec\x75\xe0\x2f\x27\xd3\x96" + b"\xda\xe8\x58\x7f\xe0\xfe\x46\x5c\x81\xef\x50\xec\x29\x9f\xae\xd5" + b"\xad\x46\x3c\x91\x68\x83\x4d\xea\x8d\xa8\x19\x04\x04\x79\x03\x0b" + ) + + with self.assertRaises(BadSignatureError): + vk.verify(sig, data) + + def test_ed448_from_pem(self): + pem_str = ( + "-----BEGIN PUBLIC KEY-----\n" + "MEMwBQYDK2VxAzoAeQtetSu7CMEzE+XWB10Bg47LCA0giNikOxHzdp+tZ/eK/En0\n" + "dTdYD2ll94g58MhSnBiBQB9A1MMA\n" + "-----END PUBLIC KEY-----\n" + ) + + vk = VerifyingKey.from_pem(pem_str) + + self.assertIsInstance(vk.curve, Curve) + self.assertIs(vk.curve, Ed448) + + vk_str = ( + b"\x79\x0b\x5e\xb5\x2b\xbb\x08\xc1\x33\x13\xe5\xd6\x07\x5d\x01\x83" + b"\x8e\xcb\x08\x0d\x20\x88\xd8\xa4\x3b\x11\xf3\x76\x9f\xad\x67\xf7" + b"\x8a\xfc\x49\xf4\x75\x37\x58\x0f\x69\x65\xf7\x88\x39\xf0\xc8\x52" + b"\x9c\x18\x81\x40\x1f\x40\xd4\xc3\x00" + ) + + vk2 = VerifyingKey.from_string(vk_str, Ed448) + + self.assertEqual(vk, vk2) + + def test_ed448_to_pem(self): + vk_str = ( + b"\x79\x0b\x5e\xb5\x2b\xbb\x08\xc1\x33\x13\xe5\xd6\x07\x5d\x01\x83" + b"\x8e\xcb\x08\x0d\x20\x88\xd8\xa4\x3b\x11\xf3\x76\x9f\xad\x67\xf7" + b"\x8a\xfc\x49\xf4\x75\x37\x58\x0f\x69\x65\xf7\x88\x39\xf0\xc8\x52" + b"\x9c\x18\x81\x40\x1f\x40\xd4\xc3\x00" + ) + vk = VerifyingKey.from_string(vk_str, Ed448) + + vk_pem = ( + b"-----BEGIN PUBLIC KEY-----\n" + b"MEMwBQYDK2VxAzoAeQtetSu7CMEzE+XWB10Bg47LCA0giNikOxHzdp+tZ/eK/En0dTdYD2ll94g5\n" + b"8MhSnBiBQB9A1MMA\n" + b"-----END PUBLIC KEY-----\n" + ) + + self.assertEqual(vk_pem, vk.to_pem()) + + def test_ed448_export_import(self): + sk = SigningKey.generate(Ed448) + vk = sk.verifying_key + + vk2 = VerifyingKey.from_pem(vk.to_pem()) + + self.assertEqual(vk, vk2) + + def test_ed448_sig_verify(self): + pem_str = ( + "-----BEGIN PUBLIC KEY-----\n" + "MEMwBQYDK2VxAzoAeQtetSu7CMEzE+XWB10Bg47LCA0giNikOxHzdp+tZ/eK/En0\n" + 
"dTdYD2ll94g58MhSnBiBQB9A1MMA\n" + "-----END PUBLIC KEY-----\n" + ) + + vk = VerifyingKey.from_pem(pem_str) + + data = b"data\n" + + # signature created by OpenSSL 3.0.0 beta1 + sig = ( + b"\x68\xed\x2c\x70\x35\x22\xca\x1c\x35\x03\xf3\xaa\x51\x33\x3d\x00" + b"\xc0\xae\xb0\x54\xc5\xdc\x7f\x6f\x30\x57\xb4\x1d\xcb\xe9\xec\xfa" + b"\xc8\x45\x3e\x51\xc1\xcb\x60\x02\x6a\xd0\x43\x11\x0b\x5f\x9b\xfa" + b"\x32\x88\xb2\x38\x6b\xed\xac\x09\x00\x78\xb1\x7b\x5d\x7e\xf8\x16" + b"\x31\xdd\x1b\x3f\x98\xa0\xce\x19\xe7\xd8\x1c\x9f\x30\xac\x2f\xd4" + b"\x1e\x55\xbf\x21\x98\xf6\x4c\x8c\xbe\x81\xa5\x2d\x80\x4c\x62\x53" + b"\x91\xd5\xee\x03\x30\xc6\x17\x66\x4b\x9e\x0c\x8d\x40\xd0\xad\xae" + b"\x0a\x00" + ) + + self.assertTrue(vk.verify(sig, data)) class TestSigningKey(unittest.TestCase): @@ -237,6 +550,61 @@ def setUpClass(cls): ) cls.sk2 = SigningKey.from_pem(prv_key_str) + def test_to_der_pkcs8(self): + self.assertEqual( + self.sk1.to_der(format="pkcs8"), + b"0o\x02\x01\x010\x13\x06\x07*\x86H\xce=\x02\x01\x06\x08*\x86H" + b"\xce=\x03\x01\x01\x04U0S\x02\x01\x01\x04\x18^\xc8B\x0b\xd6\xef" + b"\x92R\xa9B\xe9\x89\x04<\xa2\x9fV\x1f\xa5%w\x0e\xb1\xc5\xa14\x03" + b"2\x00\x04\xb8\x81w\xd0\x84\xef\x17\xf5\xe4V9@\x80(6\x0f\x9fY" + b"\xb4\xa4\xd7&Nb\xda\x06Q\xdc\xe4z5\xa4\xc5\xb4\\\xf5\x15\x93B:" + b"\x8bU{\x9c \x99\xf3l", + ) + + def test_decoding_explicit_curve_parameters(self): + prv_key_str = ( + "-----BEGIN PRIVATE KEY-----\n" + "MIIBeQIBADCCAQMGByqGSM49AgEwgfcCAQEwLAYHKoZIzj0BAQIhAP////8AAAAB\n" + "AAAAAAAAAAAAAAAA////////////////MFsEIP////8AAAABAAAAAAAAAAAAAAAA\n" + "///////////////8BCBaxjXYqjqT57PrvVV2mIa8ZR0GsMxTsPY7zjw+J9JgSwMV\n" + "AMSdNgiG5wSTamZ44ROdJreBn36QBEEEaxfR8uEsQkf4vOblY6RA8ncDfYEt6zOg\n" + "9KE5RdiYwpZP40Li/hp/m47n60p8D54WK84zV2sxXs7LtkBoN79R9QIhAP////8A\n" + "AAAA//////////+85vqtpxeehPO5ysL8YyVRAgEBBG0wawIBAQQgIXtREfUmR16r\n" + "ZbmvDGD2lAEFPZa2DLPyz0czSja58yChRANCAASK9VJIGLOY5mxXO4rdf2CNl0//\n" + "yJWhIzDWX7daMI6qQWB9hKyR5OFOT6eFr1qtcZh8CGZbB+yIOCpnnwn0ekpl\n" + "-----END PRIVATE KEY-----\n" + ) + + sk = SigningKey.from_pem(prv_key_str) + + sk2 = SigningKey.from_string( + b"\x21\x7b\x51\x11\xf5\x26\x47\x5e\xab\x65\xb9\xaf\x0c\x60\xf6" + b"\x94\x01\x05\x3d\x96\xb6\x0c\xb3\xf2\xcf\x47\x33\x4a\x36\xb9" + b"\xf3\x20", + curve=NIST256p, + ) + + self.assertEqual(sk, sk2) + + def test_decoding_explicit_curve_parameters_with_explicit_disabled(self): + prv_key_str = ( + "-----BEGIN PRIVATE KEY-----\n" + "MIIBeQIBADCCAQMGByqGSM49AgEwgfcCAQEwLAYHKoZIzj0BAQIhAP////8AAAAB\n" + "AAAAAAAAAAAAAAAA////////////////MFsEIP////8AAAABAAAAAAAAAAAAAAAA\n" + "///////////////8BCBaxjXYqjqT57PrvVV2mIa8ZR0GsMxTsPY7zjw+J9JgSwMV\n" + "AMSdNgiG5wSTamZ44ROdJreBn36QBEEEaxfR8uEsQkf4vOblY6RA8ncDfYEt6zOg\n" + "9KE5RdiYwpZP40Li/hp/m47n60p8D54WK84zV2sxXs7LtkBoN79R9QIhAP////8A\n" + "AAAA//////////+85vqtpxeehPO5ysL8YyVRAgEBBG0wawIBAQQgIXtREfUmR16r\n" + "ZbmvDGD2lAEFPZa2DLPyz0czSja58yChRANCAASK9VJIGLOY5mxXO4rdf2CNl0//\n" + "yJWhIzDWX7daMI6qQWB9hKyR5OFOT6eFr1qtcZh8CGZbB+yIOCpnnwn0ekpl\n" + "-----END PRIVATE KEY-----\n" + ) + + with self.assertRaises(UnexpectedDER): + SigningKey.from_pem( + prv_key_str, valid_curve_encodings=["named_curve"] + ) + def test_equality_on_signing_keys(self): sk = SigningKey.from_secret_exponent( self.sk1.privkey.secret_multiplier, self.sk1.curve @@ -244,12 +612,307 @@ def test_equality_on_signing_keys(self): self.assertEqual(self.sk1, sk) self.assertEqual(self.sk1_pkcs8, sk) + def test_verify_with_empty_message(self): + sig = self.sk1.sign(b"") + + self.assertTrue(sig) + + vk = self.sk1.verifying_key + + 
self.assertTrue(vk.verify(sig, b"")) + + def test_verify_with_precompute(self): + sig = self.sk1.sign(b"message") + + vk = self.sk1.verifying_key + + vk.precompute() + + self.assertTrue(vk.verify(sig, b"message")) + + def test_compare_verifying_key_with_precompute(self): + vk1 = self.sk1.verifying_key + vk1.precompute() + + vk2 = self.sk1_pkcs8.verifying_key + + self.assertEqual(vk1, vk2) + + def test_verify_with_lazy_precompute(self): + sig = self.sk2.sign(b"other message") + + vk = self.sk2.verifying_key + + vk.precompute(lazy=True) + + self.assertTrue(vk.verify(sig, b"other message")) + def test_inequality_on_signing_keys(self): self.assertNotEqual(self.sk1, self.sk2) def test_inequality_on_signing_keys_not_implemented(self): self.assertNotEqual(self.sk1, None) + def test_ed25519_from_pem(self): + pem_str = ( + "-----BEGIN PRIVATE KEY-----\n" + "MC4CAQAwBQYDK2VwBCIEIDS6x9FO1PG8T4xIPg8Zd0z8uL6sVGZFEZrX17gHC/XU\n" + "-----END PRIVATE KEY-----\n" + ) + + sk = SigningKey.from_pem(pem_str) + + sk_str = SigningKey.from_string( + b"\x34\xBA\xC7\xD1\x4E\xD4\xF1\xBC\x4F\x8C\x48\x3E\x0F\x19\x77\x4C" + b"\xFC\xB8\xBE\xAC\x54\x66\x45\x11\x9A\xD7\xD7\xB8\x07\x0B\xF5\xD4", + Ed25519, + ) + + self.assertEqual(sk, sk_str) + + def test_ed25519_from_der_bad_alg_id_params(self): + der_str = encode_sequence( + encode_integer(1), + encode_sequence(encode_oid(*Ed25519.oid), encode_integer(1)), + encode_octet_string(encode_octet_string(b"A" * 32)), + ) + + with self.assertRaises(UnexpectedDER) as e: + SigningKey.from_der(der_str) + + self.assertIn("Non NULL parameters", str(e.exception)) + + def test_ed25519_from_der_junk_after_priv_key(self): + der_str = encode_sequence( + encode_integer(1), + encode_sequence( + encode_oid(*Ed25519.oid), + ), + encode_octet_string(encode_octet_string(b"A" * 32) + b"B"), + ) + + with self.assertRaises(UnexpectedDER) as e: + SigningKey.from_der(der_str) + + self.assertIn( + "trailing junk after the encoded private key", str(e.exception) + ) + + def test_ed25519_sign(self): + sk_str = SigningKey.from_string( + b"\x34\xBA\xC7\xD1\x4E\xD4\xF1\xBC\x4F\x8C\x48\x3E\x0F\x19\x77\x4C" + b"\xFC\xB8\xBE\xAC\x54\x66\x45\x11\x9A\xD7\xD7\xB8\x07\x0B\xF5\xD4", + Ed25519, + ) + + msg = b"message" + + sig = sk_str.sign(msg, sigencode=sigencode_der) + + self.assertEqual( + sig, + b"\xe1,v\xc9>%\xda\xd2~>\xc3&\na\xf4@|\x9e`X\x11\x13@<\x987\xd4" + b"\r\xb1\xf5\xb3\x15\x7f%i{\xdf}\xdd\xb1\xf3\x02\x7f\x80\x02\xc2" + b'|\xe5\xd6\x06\xc4\n\xa3\xb0\xf6}\xc0\xed)"+E\xaf\x00', + ) + + def test_ed25519_sign_digest_deterministic(self): + sk_str = SigningKey.from_string( + b"\x34\xBA\xC7\xD1\x4E\xD4\xF1\xBC\x4F\x8C\x48\x3E\x0F\x19\x77\x4C" + b"\xFC\xB8\xBE\xAC\x54\x66\x45\x11\x9A\xD7\xD7\xB8\x07\x0B\xF5\xD4", + Ed25519, + ) + with self.assertRaises(ValueError) as e: + sk_str.sign_digest_deterministic(b"a" * 20) + + self.assertIn("Method unsupported for Edwards", str(e.exception)) + + def test_ed25519_sign_digest(self): + sk_str = SigningKey.from_string( + b"\x34\xBA\xC7\xD1\x4E\xD4\xF1\xBC\x4F\x8C\x48\x3E\x0F\x19\x77\x4C" + b"\xFC\xB8\xBE\xAC\x54\x66\x45\x11\x9A\xD7\xD7\xB8\x07\x0B\xF5\xD4", + Ed25519, + ) + with self.assertRaises(ValueError) as e: + sk_str.sign_digest(b"a" * 20) + + self.assertIn("Method unsupported for Edwards", str(e.exception)) + + def test_ed25519_sign_number(self): + sk_str = SigningKey.from_string( + b"\x34\xBA\xC7\xD1\x4E\xD4\xF1\xBC\x4F\x8C\x48\x3E\x0F\x19\x77\x4C" + b"\xFC\xB8\xBE\xAC\x54\x66\x45\x11\x9A\xD7\xD7\xB8\x07\x0B\xF5\xD4", + Ed25519, + ) + with 
self.assertRaises(ValueError) as e: + sk_str.sign_number(20) + + self.assertIn("Method unsupported for Edwards", str(e.exception)) + + def test_ed25519_to_der_ssleay(self): + pem_str = ( + "-----BEGIN PRIVATE KEY-----\n" + "MC4CAQAwBQYDK2VwBCIEIDS6x9FO1PG8T4xIPg8Zd0z8uL6sVGZFEZrX17gHC/XU\n" + "-----END PRIVATE KEY-----\n" + ) + + sk = SigningKey.from_pem(pem_str) + + with self.assertRaises(ValueError) as e: + sk.to_der(format="ssleay") + + self.assertIn("Only PKCS#8 format", str(e.exception)) + + def test_ed25519_to_pem(self): + sk = SigningKey.from_string( + b"\x34\xBA\xC7\xD1\x4E\xD4\xF1\xBC\x4F\x8C\x48\x3E\x0F\x19\x77\x4C" + b"\xFC\xB8\xBE\xAC\x54\x66\x45\x11\x9A\xD7\xD7\xB8\x07\x0B\xF5\xD4", + Ed25519, + ) + + pem_str = ( + b"-----BEGIN PRIVATE KEY-----\n" + b"MC4CAQAwBQYDK2VwBCIEIDS6x9FO1PG8T4xIPg8Zd0z8uL6sVGZFEZrX17gHC/XU\n" + b"-----END PRIVATE KEY-----\n" + ) + + self.assertEqual(sk.to_pem(format="pkcs8"), pem_str) + + def test_ed25519_to_ssh(self): + sk = SigningKey.from_string( + b"\x34\xBA\xC7\xD1\x4E\xD4\xF1\xBC\x4F\x8C\x48\x3E\x0F\x19\x77\x4C" + b"\xFC\xB8\xBE\xAC\x54\x66\x45\x11\x9A\xD7\xD7\xB8\x07\x0B\xF5\xD4", + Ed25519, + ) + + ssh_str = ( + b"-----BEGIN OPENSSH PRIVATE KEY-----\n" + b"b3BlbnNzaC1rZXktdjEAAAAABG5vbmUAAAAEbm9uZQAAAAAAAAABAAAAMwAAAAtzc2gtZWQyNTUx\n" + b"OQAAACAjAFDQ1mQiKI7jVYl+bkFXja7eRCbuVie8heYLLyrLZQAAAIgAAAAAAAAAAAAAAAtzc2gt\n" + b"ZWQyNTUxOQAAACAjAFDQ1mQiKI7jVYl+bkFXja7eRCbuVie8heYLLyrLZQAAAEA0usfRTtTxvE+M\n" + b"SD4PGXdM/Li+rFRmRRGa19e4Bwv11CMAUNDWZCIojuNViX5uQVeNrt5EJu5WJ7yF5gsvKstlAAAA\n" + b"AAECAwQF\n" + b"-----END OPENSSH PRIVATE KEY-----\n" + ) + + self.assertEqual(sk.to_ssh(), ssh_str) + + def test_ed25519_to_and_from_pem(self): + sk = SigningKey.generate(Ed25519) + + decoded = SigningKey.from_pem(sk.to_pem(format="pkcs8")) + + self.assertEqual(sk, decoded) + + def test_ed25519_custom_entropy(self): + sk = SigningKey.generate(Ed25519, entropy=os.urandom) + + self.assertIsNotNone(sk) + + def test_ed25519_from_secret_exponent(self): + with self.assertRaises(ValueError) as e: + SigningKey.from_secret_exponent(1234567890, curve=Ed25519) + + self.assertIn("don't support setting the secret", str(e.exception)) + + def test_ed448_from_pem(self): + pem_str = ( + "-----BEGIN PRIVATE KEY-----\n" + "MEcCAQAwBQYDK2VxBDsEOTyFuXqFLXgJlV8uDqcOw9nG4IqzLiZ/i5NfBDoHPzmP\n" + "OP0JMYaLGlTzwovmvCDJ2zLaezu9NLz9aQ==\n" + "-----END PRIVATE KEY-----\n" + ) + sk = SigningKey.from_pem(pem_str) + + sk_str = SigningKey.from_string( + b"\x3C\x85\xB9\x7A\x85\x2D\x78\x09\x95\x5F\x2E\x0E\xA7\x0E\xC3\xD9" + b"\xC6\xE0\x8A\xB3\x2E\x26\x7F\x8B\x93\x5F\x04\x3A\x07\x3F\x39\x8F" + b"\x38\xFD\x09\x31\x86\x8B\x1A\x54\xF3\xC2\x8B\xE6\xBC\x20\xC9\xDB" + b"\x32\xDA\x7B\x3B\xBD\x34\xBC\xFD\x69", + Ed448, + ) + + self.assertEqual(sk, sk_str) + + def test_ed448_to_pem(self): + sk = SigningKey.from_string( + b"\x3C\x85\xB9\x7A\x85\x2D\x78\x09\x95\x5F\x2E\x0E\xA7\x0E\xC3\xD9" + b"\xC6\xE0\x8A\xB3\x2E\x26\x7F\x8B\x93\x5F\x04\x3A\x07\x3F\x39\x8F" + b"\x38\xFD\x09\x31\x86\x8B\x1A\x54\xF3\xC2\x8B\xE6\xBC\x20\xC9\xDB" + b"\x32\xDA\x7B\x3B\xBD\x34\xBC\xFD\x69", + Ed448, + ) + pem_str = ( + b"-----BEGIN PRIVATE KEY-----\n" + b"MEcCAQAwBQYDK2VxBDsEOTyFuXqFLXgJlV8uDqcOw9nG4IqzLiZ/i5NfBDoHPzmPOP0JMYaLGlTz\n" + b"wovmvCDJ2zLaezu9NLz9aQ==\n" + b"-----END PRIVATE KEY-----\n" + ) + + self.assertEqual(sk.to_pem(format="pkcs8"), pem_str) + + def test_ed448_encode_decode(self): + sk = SigningKey.generate(Ed448) + + decoded = SigningKey.from_pem(sk.to_pem(format="pkcs8")) + + self.assertEqual(decoded, sk) + + 
+class TestTrivialCurve(unittest.TestCase): + @classmethod + def setUpClass(cls): + # To test what happens with r or s in signing happens to be zero we + # need to find a scalar that creates one of the points on a curve that + # has x coordinate equal to zero. + # Even for secp112r2 curve that's non trivial so use this toy + # curve, for which we can iterate over all points quickly + curve = CurveFp(163, 84, 58) + gen = PointJacobi(curve, 2, 87, 1, 167, generator=True) + + cls.toy_curve = Curve("toy_p8", curve, gen, (1, 2, 0)) + + cls.sk = SigningKey.from_secret_exponent( + 140, + cls.toy_curve, + hashfunc=hashlib.sha1, + ) + + def test_generator_sanity(self): + gen = self.toy_curve.generator + + self.assertEqual(gen * gen.order(), INFINITY) + + def test_public_key_sanity(self): + self.assertEqual(self.sk.verifying_key.to_string(), b"\x98\x1e") + + def test_deterministic_sign(self): + sig = self.sk.sign_deterministic(b"message") + + self.assertEqual(sig, b"-.") + + self.assertTrue(self.sk.verifying_key.verify(sig, b"message")) + + def test_deterministic_sign_random_message(self): + msg = os.urandom(32) + sig = self.sk.sign_deterministic(msg) + self.assertEqual(len(sig), 2) + self.assertTrue(self.sk.verifying_key.verify(sig, msg)) + + def test_deterministic_sign_that_rises_R_zero_error(self): + # the raised RSZeroError is caught and handled internally by + # sign_deterministic methods + msg = b"\x00\x4f" + sig = self.sk.sign_deterministic(msg) + self.assertEqual(sig, b"\x36\x9e") + self.assertTrue(self.sk.verifying_key.verify(sig, msg)) + + def test_deterministic_sign_that_rises_S_zero_error(self): + msg = b"\x01\x6d" + sig = self.sk.sign_deterministic(msg) + self.assertEqual(sig, b"\x49\x6c") + self.assertTrue(self.sk.verifying_key.verify(sig, msg)) + # test VerifyingKey.verify() prv_key_str = ( @@ -285,8 +948,8 @@ def test_inequality_on_signing_keys_not_implemented(self): verifiers = [] for modifier, fun in [ ("bytes", lambda x: x), - ("bytes memoryview", lambda x: buffer(x)), - ("bytearray", lambda x: bytearray(x)), + ("bytes memoryview", buffer), + ("bytearray", bytearray), ("bytearray memoryview", lambda x: buffer(bytearray(x))), ("array.array of bytes", lambda x: array.array("B", x)), ("array.array of bytes memoryview", lambda x: buffer(array.array("B", x))), @@ -426,3 +1089,50 @@ def test_SigningKey_with_unlikely_value(): vk = sk.verifying_key sig = sk.sign(b"hello") assert vk.verify(sig, b"hello") + + +def test_SigningKey_with_custom_curve_old_point(): + generator = generator_brainpoolp160r1 + generator = Point( + generator.curve(), + generator.x(), + generator.y(), + generator.order(), + ) + + curve = Curve( + "BRAINPOOLP160r1", + generator.curve(), + generator, + (1, 3, 36, 3, 3, 2, 8, 1, 1, 1), + ) + + sk = SigningKey.from_secret_exponent(12, curve) + + sk2 = SigningKey.from_secret_exponent(12, BRAINPOOLP160r1) + + assert sk.privkey == sk2.privkey + + +def test_VerifyingKey_inequality_with_different_curves(): + sk1 = SigningKey.from_secret_exponent(2, BRAINPOOLP160r1) + sk2 = SigningKey.from_secret_exponent(2, NIST256p) + + assert not (sk1.verifying_key == sk2.verifying_key) + + +def test_VerifyingKey_inequality_with_different_secret_points(): + sk1 = SigningKey.from_secret_exponent(2, BRAINPOOLP160r1) + sk2 = SigningKey.from_secret_exponent(3, BRAINPOOLP160r1) + + assert not (sk1.verifying_key == sk2.verifying_key) + + +def test_SigningKey_from_pem_pkcs8v2_EdDSA(): + pem = """-----BEGIN PRIVATE KEY----- + MFMCAQEwBQYDK2VwBCIEICc2F2ag1n1QP0jY+g9qWx5sDkx0s/HdNi3cSRHw+zsI + 
oSMDIQA+HQ2xCif8a/LMWR2m5HaCm5I2pKe/cc8OiRANMHxjKQ== + -----END PRIVATE KEY-----""" + + sk = SigningKey.from_pem(pem) + assert sk.curve == Ed25519 diff --git a/src/ecdsa/test_malformed_sigs.py b/src/ecdsa/test_malformed_sigs.py index 4895ceab..e5a87c28 100644 --- a/src/ecdsa/test_malformed_sigs.py +++ b/src/ecdsa/test_malformed_sigs.py @@ -13,10 +13,17 @@ "sha384", "sha512", ] +# skip algorithms broken by change to OpenSSL 3.0 and early versions +# of hashlib that list algorithms that require the legacy provider to work +# https://bugs.python.org/issue38820 +algorithms_available = [ + i + for i in algorithms_available + if i not in ("mdc2", "md2", "md4", "whirlpool", "ripemd160") +] from functools import partial import pytest import sys -from six import binary_type import hypothesis.strategies as st from hypothesis import note, assume, given, settings, example @@ -24,7 +31,7 @@ from .keys import BadSignatureError from .util import sigencode_der, sigencode_string from .util import sigdecode_der, sigdecode_string -from .curves import curves, NIST256p +from .curves import curves, SECP112r2, SECP128r1 from .der import ( encode_integer, encode_bitstring, @@ -33,6 +40,7 @@ encode_sequence, encode_constructed, ) +from .ellipticcurve import CurveEdTw example_data = b"some data to sign" @@ -47,6 +55,10 @@ bigger than order sizes of curves.""" +if "--fast" in sys.argv: # pragma: no cover + curves = [SECP112r2, SECP128r1] + + keys_and_sigs = [] """Name of the curve+hash combination, VerifyingKey and DER signature.""" @@ -83,7 +95,7 @@ def test_signatures(verifying_key, signature): @st.composite -def st_fuzzed_sig(draw, keys_and_sigs): +def st_fuzzed_sig(draw, keys_and_sigs): # pragma: no cover """ Hypothesis strategy that generates pairs of VerifyingKey and malformed signatures created by fuzzing of a valid signature. @@ -103,6 +115,7 @@ def st_fuzzed_sig(draw, keys_and_sigs): note("Remove bytes: {0}".format(to_remove)) # decide which bytes of the original signature should be changed + xors = None if sig: # pragma: no branch xors = draw( st.dictionaries( @@ -145,12 +158,17 @@ def st_fuzzed_sig(draw, keys_and_sigs): HealthCheck.filter_too_much, HealthCheck.too_slow, ] +if "--fast" in sys.argv: # pragma: no cover + params["max_examples"] = 20 slow_params = dict(params) -slow_params["max_examples"] = 10 +if "--fast" in sys.argv: # pragma: no cover + slow_params["max_examples"] = 1 +else: + slow_params["max_examples"] = 10 -@settings(**params) +@settings(**slow_params) @given(st_fuzzed_sig(keys_and_sigs)) def test_fuzzed_der_signatures(args): verifying_key, sig = args @@ -160,7 +178,7 @@ def test_fuzzed_der_signatures(args): @st.composite -def st_random_der_ecdsa_sig_value(draw): +def st_random_der_ecdsa_sig_value(draw): # pragma: no cover """ Hypothesis strategy for selecting random values and encoding them to ECDSA-Sig-Value object:: @@ -174,7 +192,7 @@ def st_random_der_ecdsa_sig_value(draw): note("Configuration: {0}".format(name)) order = int(verifying_key.curve.order) - # the encode_integer doesn't suport negative numbers, would be nice + # the encode_integer doesn't support negative numbers, would be nice # to generate them too, but we have coverage for remove_integer() # verifying that it doesn't accept them, so meh. 
# Test all numbers around the ones that can show up (around order) @@ -206,7 +224,7 @@ def test_random_der_ecdsa_sig_value(params): verifying_key.verify(sig, example_data, sigdecode=sigdecode_der) -def st_der_integer(*args, **kwargs): +def st_der_integer(*args, **kwargs): # pragma: no cover """ Hypothesis strategy that returns a random positive integer as DER INTEGER. @@ -218,7 +236,7 @@ def st_der_integer(*args, **kwargs): @st.composite -def st_der_bit_string(draw, *args, **kwargs): +def st_der_bit_string(draw, *args, **kwargs): # pragma: no cover """ Hypothesis strategy that returns a random DER BIT STRING. Parameters are passed to hypothesis.strategy.binary. @@ -227,14 +245,14 @@ def st_der_bit_string(draw, *args, **kwargs): if data: unused = draw(st.integers(min_value=0, max_value=7)) data = bytearray(data) - data[-1] &= -(2 ** unused) + data[-1] &= -(2**unused) data = bytes(data) else: unused = 0 return encode_bitstring(data, unused) -def st_der_octet_string(*args, **kwargs): +def st_der_octet_string(*args, **kwargs): # pragma: no cover """ Hypothesis strategy that returns a random DER OCTET STRING object. Parameters are passed to hypothesis.strategy.binary @@ -242,7 +260,7 @@ def st_der_octet_string(*args, **kwargs): return st.builds(encode_octet_string, st.binary(*args, **kwargs)) -def st_der_null(): +def st_der_null(): # pragma: no cover """ Hypothesis strategy that returns DER NULL object. """ @@ -250,7 +268,7 @@ def st_der_null(): @st.composite -def st_der_oid(draw): +def st_der_oid(draw): # pragma: no cover """ Hypothesis strategy that returns DER OBJECT IDENTIFIER objects. """ @@ -258,14 +276,14 @@ def st_der_oid(draw): if first < 2: second = draw(st.integers(min_value=0, max_value=39)) else: - second = draw(st.integers(min_value=0, max_value=2 ** 512)) + second = draw(st.integers(min_value=0, max_value=2**512)) rest = draw( - st.lists(st.integers(min_value=0, max_value=2 ** 512), max_size=50) + st.lists(st.integers(min_value=0, max_value=2**512), max_size=50) ) return encode_oid(first, second, *rest) -def st_der(): +def st_der(): # pragma: no cover """ Hypothesis strategy that returns random DER structures. @@ -273,22 +291,20 @@ def st_der(): of a valid DER structure, sequence of valid DER objects or a constructed encoding of any of the above. 
""" - return st.recursive( + return st.recursive( # pragma: no branch st.just(b"") - | st_der_integer(max_value=2 ** 4096) - | st_der_bit_string(max_size=1024 ** 2) - | st_der_octet_string(max_size=1024 ** 2) + | st_der_integer(max_value=2**4096) + | st_der_bit_string(max_size=1024**2) + | st_der_octet_string(max_size=1024**2) | st_der_null() | st_der_oid(), - lambda children: st.builds( - lambda x: encode_octet_string(x), st.one_of(children) - ) + lambda children: st.builds(encode_octet_string, st.one_of(children)) | st.builds(lambda x: encode_bitstring(x, 0), st.one_of(children)) | st.builds( lambda x: encode_sequence(*x), st.lists(children, max_size=200) ) | st.builds( - lambda tag, x: encode_constructed(tag, x), + encode_constructed, st.integers(min_value=0, max_value=0x3F), st.one_of(children), ), @@ -296,7 +312,7 @@ def st_der(): ) -@settings(**params) +@settings(**slow_params) @given(st.sampled_from(keys_and_sigs), st_der()) def test_random_der_as_signature(params, der): """Check if random DER structures are rejected as signature""" @@ -306,8 +322,8 @@ def test_random_der_as_signature(params, der): verifying_key.verify(der, example_data, sigdecode=sigdecode_der) -@settings(**params) -@given(st.sampled_from(keys_and_sigs), st.binary(max_size=1024 ** 2)) +@settings(**slow_params) +@given(st.sampled_from(keys_and_sigs), st.binary(max_size=1024**2)) @example( keys_and_sigs[0], encode_sequence(encode_integer(0), encode_integer(0)) ) @@ -334,6 +350,7 @@ def test_random_bytes_as_signature(params, der): ), ) for name, verifying_key, sig in keys_and_sigs + if not isinstance(verifying_key.curve.curve, CurveEdTw) ] """ Name of the curve+hash combination, VerifyingKey and signature as a @@ -341,7 +358,18 @@ def test_random_bytes_as_signature(params, der): """ -@settings(**params) +keys_and_string_sigs += [ + ( + name, + verifying_key, + sig, + ) + for name, verifying_key, sig in keys_and_sigs + if isinstance(verifying_key.curve.curve, CurveEdTw) +] + + +@settings(**slow_params) @given(st_fuzzed_sig(keys_and_string_sigs)) def test_fuzzed_string_signatures(params): verifying_key, sig = params diff --git a/src/ecdsa/test_numbertheory.py b/src/ecdsa/test_numbertheory.py index 4912c578..966eca29 100644 --- a/src/ecdsa/test_numbertheory.py +++ b/src/ecdsa/test_numbertheory.py @@ -1,7 +1,6 @@ import operator -from six import print_ from functools import reduce -import operator +import sys try: import unittest2 as unittest @@ -19,6 +18,7 @@ HC_PRESENT = False from .numbertheory import ( SquareRootError, + JacobiError, factorization, gcd, lcm, @@ -30,6 +30,16 @@ square_root_mod_prime, ) +try: + from gmpy2 import mpz +except ImportError: + try: + from gmpy import mpz + except ImportError: + + def mpz(x): + return x + BIGPRIMES = ( 999671, @@ -67,6 +77,7 @@ def test_next_prime_with_nums_less_2(val): assert next_prime(val) == 2 +@pytest.mark.slow @pytest.mark.parametrize("prime", smallprimes) def test_square_root_mod_prime_for_small_primes(prime): squares = set() @@ -84,11 +95,61 @@ def test_square_root_mod_prime_for_small_primes(prime): square_root_mod_prime(nonsquare, prime) +def test_square_root_mod_prime_for_2(): + a = square_root_mod_prime(1, 2) + assert a == 1 + + +def test_square_root_mod_prime_for_small_prime(): + root = square_root_mod_prime(98**2 % 101, 101) + assert root * root % 101 == 9 + + +def test_square_root_mod_prime_for_p_congruent_5(): + p = 13 + assert p % 8 == 5 + + root = square_root_mod_prime(3, p) + assert root * root % p == 3 + + +def 
test_square_root_mod_prime_for_p_congruent_5_large_d(): + p = 29 + assert p % 8 == 5 + + root = square_root_mod_prime(4, p) + assert root * root % p == 4 + + +class TestSquareRootModPrime(unittest.TestCase): + def test_power_of_2_p(self): + with self.assertRaises(JacobiError): + square_root_mod_prime(12, 32) + + def test_no_square(self): + with self.assertRaises(SquareRootError) as e: + square_root_mod_prime(12, 31) + + self.assertIn("no square root", str(e.exception)) + + def test_non_prime(self): + with self.assertRaises(SquareRootError) as e: + square_root_mod_prime(12, 33) + + self.assertIn("p is not prime", str(e.exception)) + + def test_non_prime_with_negative(self): + with self.assertRaises(SquareRootError) as e: + square_root_mod_prime(697 - 1, 697) + + self.assertIn("p is not prime", str(e.exception)) + + @st.composite def st_two_nums_rel_prime(draw): # 521-bit is the biggest curve we operate on, use 1024 for a bit # of breathing space - mod = draw(st.integers(min_value=2, max_value=2 ** 1024)) + mod = draw(st.integers(min_value=2, max_value=2**1024)) num = draw( st.integers(min_value=1, max_value=mod - 1).filter( lambda x: gcd(x, mod) == 1 @@ -110,7 +171,7 @@ def st_primes(draw, *args, **kwargs): @st.composite def st_num_square_prime(draw): - prime = draw(st_primes(max_value=2 ** 1024)) + prime = draw(st_primes(max_value=2**1024)) num = draw(st.integers(min_value=0, max_value=1 + prime // 2)) sq = num * num % prime return sq, prime @@ -122,7 +183,7 @@ def st_comp_with_com_fac(draw): Strategy that returns lists of numbers, all having a common factor. """ primes = draw( - st.lists(st_primes(max_value=2 ** 512), min_size=1, max_size=10) + st.lists(st_primes(max_value=2**512), min_size=1, max_size=10) ) # select random prime(s) that will make the common factor of composites com_fac_primes = draw( @@ -133,7 +194,7 @@ def st_comp_with_com_fac(draw): # select at most 20 lists (returned numbers), # each having at most 30 primes (factors) including none (then the number # will be 1) - comp_primes = draw( + comp_primes = draw( # pragma: no branch st.integers(min_value=1, max_value=20).flatmap( lambda n: st.lists( st.lists(st.sampled_from(primes), max_size=30), @@ -153,7 +214,7 @@ def st_comp_no_com_fac(draw): """ primes = draw( st.lists( - st_primes(max_value=2 ** 512), min_size=2, max_size=10, unique=True + st_primes(max_value=2**512), min_size=2, max_size=10, unique=True ) ) # first select the primes that will create the uncommon factor @@ -176,7 +237,7 @@ def st_comp_no_com_fac(draw): # select at most 20 lists, each having at most 30 primes # selected from the leftover_primes list - number_primes = draw( + number_primes = draw( # pragma: no branch st.integers(min_value=1, max_value=20).flatmap( lambda n: st.lists( st.lists(st.sampled_from(leftover_primes), max_size=30), @@ -202,9 +263,70 @@ def st_comp_no_com_fac(draw): # the factorization() sometimes takes a long time to finish HYP_SETTINGS["deadline"] = 5000 +if "--fast" in sys.argv: # pragma: no cover + HYP_SETTINGS["max_examples"] = 20 + HYP_SLOW_SETTINGS = dict(HYP_SETTINGS) -HYP_SLOW_SETTINGS["max_examples"] = 10 +if "--fast" in sys.argv: # pragma: no cover + HYP_SLOW_SETTINGS["max_examples"] = 1 +else: + HYP_SLOW_SETTINGS["max_examples"] = 20 + + +class TestIsPrime(unittest.TestCase): + def test_very_small_prime(self): + assert is_prime(23) + + def test_very_small_composite(self): + assert not is_prime(22) + + def test_small_prime(self): + assert is_prime(123456791) + + def test_special_composite(self): + assert not 
is_prime(10261) + + def test_medium_prime_1(self): + # nextPrime[2^256] + assert is_prime(2**256 + 0x129) + + def test_medium_prime_2(self): + # nextPrime(2^256+0x129) + assert is_prime(2**256 + 0x12D) + + def test_medium_trivial_composite(self): + assert not is_prime(2**256 + 0x130) + + def test_medium_non_trivial_composite(self): + assert not is_prime(2**256 + 0x12F) + + def test_large_prime(self): + # nextPrime[2^2048] + assert is_prime(mpz(2) ** 2048 + 0x3D5) + + def test_pseudoprime_base_19(self): + assert not is_prime(1543267864443420616877677640751301) + + def test_pseudoprime_base_300(self): + # F. Arnault "Constructing Carmichael Numbers Which Are Strong + # Pseudoprimes to Several Bases". Journal of Symbolic + # Computation. 20 (2): 151-161. doi:10.1006/jsco.1995.1042. + # Section 4.4 Large Example (a pseudoprime to all bases up to + # 300) + p = int( + "29 674 495 668 685 510 550 154 174 642 905 332 730 " + "771 991 799 853 043 350 995 075 531 276 838 753 171 " + "770 199 594 238 596 428 121 188 033 664 754 218 345 " + "562 493 168 782 883".replace(" ", "") + ) + + assert is_prime(p) + for _ in range(10): + if not is_prime(p * (313 * (p - 1) + 1) * (353 * (p - 1) + 1)): + break + else: + assert False, "composite not detected" class TestNumbertheory(unittest.TestCase): @@ -220,6 +342,7 @@ def test_gcd(self): "case times-out on it", ) @settings(**HYP_SLOW_SETTINGS) + @example([877 * 1151, 877 * 1009]) @given(st_comp_with_com_fac()) def test_gcd_with_com_factor(self, numbers): n = gcd(numbers) @@ -234,14 +357,16 @@ def test_gcd_with_com_factor(self, numbers): "case times-out on it", ) @settings(**HYP_SLOW_SETTINGS) + @example([1151, 1069, 1009]) @given(st_comp_no_com_fac()) def test_gcd_with_uncom_factor(self, numbers): n = gcd(numbers) assert n == 1 + @settings(**HYP_SLOW_SETTINGS) @given( st.lists( - st.integers(min_value=1, max_value=2 ** 8192), + st.integers(min_value=1, max_value=2**8192), min_size=1, max_size=20, ) @@ -257,9 +382,10 @@ def test_lcm(self): assert lcm([3, 5 * 3, 7 * 3]) == 3 * 5 * 7 assert lcm(3) == 3 + @settings(**HYP_SLOW_SETTINGS) @given( st.lists( - st.integers(min_value=1, max_value=2 ** 8192), + st.integers(min_value=1, max_value=2**8192), min_size=1, max_size=20, ) @@ -275,7 +401,7 @@ def test_lcm_with_random_numbers(self, numbers): "meet requirements (like `is_prime()`), the test " "case times-out on it", ) - @settings(**HYP_SETTINGS) + @settings(**HYP_SLOW_SETTINGS) @given(st_num_square_prime()) def test_square_root_mod_prime(self, vals): square, prime = vals @@ -283,10 +409,11 @@ def test_square_root_mod_prime(self, vals): calc = square_root_mod_prime(square, prime) assert calc * calc % prime == square - @settings(**HYP_SETTINGS) - @given(st.integers(min_value=1, max_value=10 ** 12)) + @pytest.mark.slow + @settings(**HYP_SLOW_SETTINGS) + @given(st.integers(min_value=1, max_value=10**12)) @example(265399 * 1526929) - @example(373297 ** 2 * 553991) + @example(373297**2 * 553991) def test_factorization(self, num): factors = factorization(num) mult = 1 @@ -294,16 +421,45 @@ def test_factorization(self, num): mult *= i[0] ** i[1] assert mult == num - @settings(**HYP_SETTINGS) + def test_factorisation_smallprimes(self): + exp = 101 * 103 + assert 101 in smallprimes + assert 103 in smallprimes + factors = factorization(exp) + mult = 1 + for i in factors: + mult *= i[0] ** i[1] + assert mult == exp + + def test_factorisation_not_smallprimes(self): + exp = 1231 * 1237 + assert 1231 not in smallprimes + assert 1237 not in smallprimes + factors = 
factorization(exp) + mult = 1 + for i in factors: + mult *= i[0] ** i[1] + assert mult == exp + + def test_jacobi_with_zero(self): + assert jacobi(0, 3) == 0 + + def test_jacobi_with_one(self): + assert jacobi(1, 3) == 1 + + @settings(**HYP_SLOW_SETTINGS) @given(st.integers(min_value=3, max_value=1000).filter(lambda x: x % 2)) def test_jacobi(self, mod): + mod = mpz(mod) if is_prime(mod): squares = set() for root in range(1, mod): + root = mpz(root) assert jacobi(root * root, mod) == 1 squares.add(root * root % mod) for i in range(1, mod): if i not in squares: + i = mpz(i) assert jacobi(i, mod) == -1 else: factors = factorization(mod) @@ -313,6 +469,7 @@ def test_jacobi(self, mod): c *= jacobi(a, i[0]) ** i[1] assert c == jacobi(a, mod) + @settings(**HYP_SLOW_SETTINGS) @given(st_two_nums_rel_prime()) def test_inverse_mod(self, nums): num, mod = nums diff --git a/src/ecdsa/test_pyecdsa.py b/src/ecdsa/test_pyecdsa.py index 65b67160..799e9b74 100644 --- a/src/ecdsa/test_pyecdsa.py +++ b/src/ecdsa/test_pyecdsa.py @@ -1,31 +1,40 @@ -from __future__ import with_statement, division +from __future__ import with_statement, division, print_function try: import unittest2 as unittest except ImportError: import unittest import os -import time import shutil import subprocess import pytest +import sys from binascii import hexlify, unhexlify -from hashlib import sha1, sha256, sha384, sha512 import hashlib from functools import partial -from hypothesis import given +from hypothesis import given, settings import hypothesis.strategies as st -from six import b, print_, binary_type +from six import binary_type from .keys import SigningKey, VerifyingKey from .keys import BadSignatureError, MalformedPointError, BadDigestError from . import util -from .util import sigencode_der, sigencode_strings -from .util import sigdecode_der, sigdecode_strings +from .util import ( + sigencode_der, + sigencode_strings, + sigencode_strings_canonize, + sigencode_string_canonize, + sigencode_der_canonize, +) +from .util import sigdecode_der, sigdecode_strings, sigdecode_string from .util import number_to_string, encoded_oid_ecPublicKey, MalformedSignature from .curves import Curve, UnknownCurveError from .curves import ( + SECP112r1, + SECP112r2, + SECP128r1, + SECP160r1, NIST192p, NIST224p, NIST256p, @@ -39,6 +48,15 @@ BRAINPOOLP320r1, BRAINPOOLP384r1, BRAINPOOLP512r1, + BRAINPOOLP160t1, + BRAINPOOLP192t1, + BRAINPOOLP224t1, + BRAINPOOLP256t1, + BRAINPOOLP320t1, + BRAINPOOLP384t1, + BRAINPOOLP512t1, + Ed25519, + Ed448, curves, ) from .ecdsa import ( @@ -57,6 +75,13 @@ class SubprocessError(Exception): pass +HYP_SETTINGS = {} + + +if "--fast" in sys.argv: # pragma: no cover + HYP_SETTINGS["max_examples"] = 2 + + def run_openssl(cmd): OPENSSL = "openssl" p = subprocess.Popen( @@ -78,24 +103,29 @@ def test_basic(self): priv = SigningKey.generate() pub = priv.get_verifying_key() - data = b("blahblah") + data = b"blahblah" sig = priv.sign(data) self.assertTrue(pub.verify(sig, data)) - self.assertRaises(BadSignatureError, pub.verify, sig, data + b("bad")) + self.assertRaises(BadSignatureError, pub.verify, sig, data + b"bad") pub2 = VerifyingKey.from_string(pub.to_string()) self.assertTrue(pub2.verify(sig, data)) def test_deterministic(self): - data = b("blahblah") + data = b"blahblah" secexp = int("9d0219792467d7d37b4d43298a7d0c05", 16) - priv = SigningKey.from_secret_exponent(secexp, SECP256k1, sha256) + priv = SigningKey.from_secret_exponent( + secexp, SECP256k1, hashlib.sha256 + ) pub = priv.get_verifying_key() k = 
rfc6979.generate_k( - SECP256k1.generator.order(), secexp, sha256, sha256(data).digest() + SECP256k1.generator.order(), + secexp, + hashlib.sha256, + hashlib.sha256(data).digest(), ) sig1 = priv.sign(data, k=k) @@ -104,7 +134,7 @@ def test_deterministic(self): sig2 = priv.sign(data, k=k) self.assertTrue(pub.verify(sig2, data)) - sig3 = priv.sign_deterministic(data, sha256) + sig3 = priv.sign_deterministic(data, hashlib.sha256) self.assertTrue(pub.verify(sig3, data)) self.assertEqual(sig1, sig2) @@ -115,41 +145,16 @@ def test_bad_usage(self): self.assertRaises(TypeError, SigningKey) self.assertRaises(TypeError, VerifyingKey) - def test_lengths(self): + def test_lengths_default(self): default = NIST192p priv = SigningKey.generate() pub = priv.get_verifying_key() self.assertEqual(len(pub.to_string()), default.verifying_key_length) - sig = priv.sign(b("data")) + sig = priv.sign(b"data") self.assertEqual(len(sig), default.signature_length) - for curve in ( - NIST192p, - NIST224p, - NIST256p, - NIST384p, - NIST521p, - BRAINPOOLP160r1, - BRAINPOOLP192r1, - BRAINPOOLP224r1, - BRAINPOOLP256r1, - BRAINPOOLP320r1, - BRAINPOOLP384r1, - BRAINPOOLP512r1, - ): - start = time.time() - priv = SigningKey.generate(curve=curve) - pub1 = priv.get_verifying_key() - keygen_time = time.time() - start - pub2 = VerifyingKey.from_string(pub1.to_string(), curve) - self.assertEqual(pub1.to_string(), pub2.to_string()) - self.assertEqual(len(pub1.to_string()), curve.verifying_key_length) - start = time.time() - sig = priv.sign(b("data")) - sign_time = time.time() - start - self.assertEqual(len(sig), curve.signature_length) def test_serialize(self): - seed = b("secret") + seed = b"secret" curve = NIST192p secexp1 = util.randrange_from_seed__trytryagain(seed, curve.order) secexp2 = util.randrange_from_seed__trytryagain(seed, curve.order) @@ -162,7 +167,7 @@ def test_serialize(self): self.assertEqual(priv1.to_pem(), priv2.to_pem()) pub1 = priv1.get_verifying_key() pub2 = priv2.get_verifying_key() - data = b("data") + data = b"data" sig1 = priv1.sign(data) sig2 = priv2.sign(data) self.assertTrue(pub1.verify(sig1, data)) @@ -172,7 +177,7 @@ def test_serialize(self): self.assertEqual(hexlify(pub1.to_string()), hexlify(pub2.to_string())) def test_nonrandom(self): - s = b("all the entropy in the entire world, compressed into one line") + s = b"all the entropy in the entire world, compressed into one line" def not_much_entropy(numbytes): return s[:numbytes] @@ -188,8 +193,8 @@ def not_much_entropy(numbytes): # want to do this with keys you care about, because the secrecy of # the private key depends upon using different random numbers for # each signature - sig1 = priv1.sign(b("data"), entropy=not_much_entropy) - sig2 = priv2.sign(b("data"), entropy=not_much_entropy) + sig1 = priv1.sign(b"data", entropy=not_much_entropy) + sig2 = priv2.sign(b"data", entropy=not_much_entropy) self.assertEqual(hexlify(sig1), hexlify(sig2)) def assertTruePrivkeysEqual(self, priv1, priv2): @@ -202,7 +207,7 @@ def assertTruePrivkeysEqual(self, priv1, priv2): ) def test_privkey_creation(self): - s = b("all the entropy in the entire world, compressed into one line") + s = b"all the entropy in the entire world, compressed into one line" def not_much_entropy(numbytes): return s[:numbytes] @@ -237,8 +242,8 @@ def test_privkey_strings(self): s1 = priv1.to_pem() self.assertEqual(type(s1), binary_type) - self.assertTrue(s1.startswith(b("-----BEGIN EC PRIVATE KEY-----"))) - self.assertTrue(s1.strip().endswith(b("-----END EC PRIVATE KEY-----"))) + 
self.assertTrue(s1.startswith(b"-----BEGIN EC PRIVATE KEY-----")) + self.assertTrue(s1.strip().endswith(b"-----END EC PRIVATE KEY-----")) priv2 = SigningKey.from_pem(s1) self.assertTruePrivkeysEqual(priv1, priv2) @@ -250,8 +255,8 @@ def test_privkey_strings(self): priv1 = SigningKey.generate(curve=NIST256p) s1 = priv1.to_pem() self.assertEqual(type(s1), binary_type) - self.assertTrue(s1.startswith(b("-----BEGIN EC PRIVATE KEY-----"))) - self.assertTrue(s1.strip().endswith(b("-----END EC PRIVATE KEY-----"))) + self.assertTrue(s1.startswith(b"-----BEGIN EC PRIVATE KEY-----")) + self.assertTrue(s1.strip().endswith(b"-----END EC PRIVATE KEY-----")) priv2 = SigningKey.from_pem(s1) self.assertTruePrivkeysEqual(priv1, priv2) @@ -264,8 +269,8 @@ def test_privkey_strings_brainpool(self): priv1 = SigningKey.generate(curve=BRAINPOOLP512r1) s1 = priv1.to_pem() self.assertEqual(type(s1), binary_type) - self.assertTrue(s1.startswith(b("-----BEGIN EC PRIVATE KEY-----"))) - self.assertTrue(s1.strip().endswith(b("-----END EC PRIVATE KEY-----"))) + self.assertTrue(s1.startswith(b"-----BEGIN EC PRIVATE KEY-----")) + self.assertTrue(s1.strip().endswith(b"-----END EC PRIVATE KEY-----")) priv2 = SigningKey.from_pem(s1) self.assertTruePrivkeysEqual(priv1, priv2) @@ -302,7 +307,7 @@ def test_pubkey_strings(self): self.assertTruePubkeysEqual(pub1, pub2) self.assertRaises( - der.UnexpectedDER, VerifyingKey.from_der, pub1_der + b("junk") + der.UnexpectedDER, VerifyingKey.from_der, pub1_der + b"junk" ) badpub = VerifyingKey.from_der(pub1_der) @@ -310,8 +315,15 @@ class FakeGenerator: def order(self): return 123456789 + class FakeCurveFp: + def p(self): + return int( + "6525534529039240705020950546962731340" + "4541085228058844382513856749047873406763" + ) + badcurve = Curve( - "unknown", None, FakeGenerator(), (1, 2, 3, 4, 5, 6), None + "unknown", FakeCurveFp(), FakeGenerator(), (1, 2, 3, 4, 5, 6), None ) badpub.curve = badcurve badder = badpub.to_der() @@ -319,10 +331,8 @@ def order(self): pem = pub1.to_pem() self.assertEqual(type(pem), binary_type) - self.assertTrue(pem.startswith(b("-----BEGIN PUBLIC KEY-----")), pem) - self.assertTrue( - pem.strip().endswith(b("-----END PUBLIC KEY-----")), pem - ) + self.assertTrue(pem.startswith(b"-----BEGIN PUBLIC KEY-----"), pem) + self.assertTrue(pem.strip().endswith(b"-----END PUBLIC KEY-----"), pem) pub2 = VerifyingKey.from_pem(pem) self.assertTruePubkeysEqual(pub1, pub2) @@ -355,8 +365,8 @@ def test_sk_to_der_with_invalid_point_encoding(self): def test_vk_from_der_garbage_after_curve_oid(self): type_oid_der = encoded_oid_ecPublicKey - curve_oid_der = der.encode_oid(*(1, 2, 840, 10045, 3, 1, 1)) + b( - "garbage" + curve_oid_der = ( + der.encode_oid(*(1, 2, 840, 10045, 3, 1, 1)) + b"garbage" ) enc_type_der = der.encode_sequence(type_oid_der, curve_oid_der) point_der = der.encode_bitstring(b"\x00\xff", None) @@ -379,7 +389,7 @@ def test_vk_from_der_garbage_after_point_string(self): type_oid_der = encoded_oid_ecPublicKey curve_oid_der = der.encode_oid(*(1, 2, 840, 10045, 3, 1, 1)) enc_type_der = der.encode_sequence(type_oid_der, curve_oid_der) - point_der = der.encode_bitstring(b"\x00\xff", None) + b("garbage") + point_der = der.encode_bitstring(b"\x00\xff", None) + b"garbage" to_decode = der.encode_sequence(enc_type_der, point_der) with self.assertRaises(der.UnexpectedDER): @@ -418,7 +428,7 @@ def test_vk_from_der_with_raw_encoding(self): def test_signature_strings(self): priv1 = SigningKey.generate() pub1 = priv1.get_verifying_key() - data = b("data") + data = b"data" sig = 
priv1.sign(data) self.assertEqual(type(sig), binary_type) @@ -438,24 +448,113 @@ def test_signature_strings(self): self.assertEqual(type(sig_der), binary_type) self.assertTrue(pub1.verify(sig_der, data, sigdecode=sigdecode_der)) + def test_sigencode_string_canonize_no_change(self): + r = 12 + s = 400 + order = SECP112r1.order + + new_r, new_s = sigdecode_string( + sigencode_string_canonize(r, s, order), order + ) + + self.assertEqual(r, new_r) + self.assertEqual(s, new_s) + + def test_sigencode_string_canonize(self): + r = 12 + order = SECP112r1.order + s = order - 10 + + new_r, new_s = sigdecode_string( + sigencode_string_canonize(r, s, order), order + ) + + self.assertEqual(r, new_r) + self.assertEqual(order - s, new_s) + + def test_sigencode_strings_canonize_no_change(self): + r = 12 + s = 400 + order = SECP112r1.order + + new_r, new_s = sigdecode_strings( + sigencode_strings_canonize(r, s, order), order + ) + + self.assertEqual(r, new_r) + self.assertEqual(s, new_s) + + def test_sigencode_strings_canonize(self): + r = 12 + order = SECP112r1.order + s = order - 10 + + new_r, new_s = sigdecode_strings( + sigencode_strings_canonize(r, s, order), order + ) + + self.assertEqual(r, new_r) + self.assertEqual(order - s, new_s) + + def test_sigencode_der_canonize_no_change(self): + r = 13 + s = 200 + order = SECP112r1.order + + new_r, new_s = sigdecode_der( + sigencode_der_canonize(r, s, order), order + ) + + self.assertEqual(r, new_r) + self.assertEqual(s, new_s) + + def test_sigencode_der_canonize(self): + r = 13 + order = SECP112r1.order + s = order - 14 + + new_r, new_s = sigdecode_der( + sigencode_der_canonize(r, s, order), order + ) + + self.assertEqual(r, new_r) + self.assertEqual(order - s, new_s) + + def test_sigencode_der_canonize_with_close_to_half_order(self): + r = 13 + order = SECP112r1.order + s = order // 2 + 1 + + regular_encode = sigencode_der(r, s, order) + canonical_encode = sigencode_der_canonize(r, s, order) + + self.assertNotEqual(regular_encode, canonical_encode) + + new_r, new_s = sigdecode_der( + sigencode_der_canonize(r, s, order), order + ) + + self.assertEqual(r, new_r) + self.assertEqual(order - s, new_s) + def test_sig_decode_strings_with_invalid_count(self): with self.assertRaises(MalformedSignature): - sigdecode_strings([b("one"), b("two"), b("three")], 0xFF) + sigdecode_strings([b"one", b"two", b"three"], 0xFF) def test_sig_decode_strings_with_wrong_r_len(self): with self.assertRaises(MalformedSignature): - sigdecode_strings([b("one"), b("two")], 0xFF) + sigdecode_strings([b"one", b"two"], 0xFF) def test_sig_decode_strings_with_wrong_s_len(self): with self.assertRaises(MalformedSignature): - sigdecode_strings([b("\xa0"), b("\xb0\xff")], 0xFF) + sigdecode_strings([b"\xa0", b"\xb0\xff"], 0xFF) def test_verify_with_too_long_input(self): sk = SigningKey.generate() vk = sk.verifying_key with self.assertRaises(BadDigestError): - vk.verify_digest(None, b("\x00") * 128) + vk.verify_digest(None, b"\x00" * 128) def test_sk_from_secret_exponent_with_wrong_sec_exponent(self): with self.assertRaises(MalformedPointError): @@ -463,11 +562,11 @@ def test_sk_from_secret_exponent_with_wrong_sec_exponent(self): def test_sk_from_string_with_wrong_len_string(self): with self.assertRaises(MalformedPointError): - SigningKey.from_string(b("\x01")) + SigningKey.from_string(b"\x01") def test_sk_from_der_with_junk_after_sequence(self): ver_der = der.encode_integer(1) - to_decode = der.encode_sequence(ver_der) + b("garbage") + to_decode = der.encode_sequence(ver_der) + b"garbage" with 
self.assertRaises(der.UnexpectedDER): SigningKey.from_der(to_decode) @@ -481,7 +580,7 @@ def test_sk_from_der_with_wrong_version(self): def test_sk_from_der_invalid_const_tag(self): ver_der = der.encode_integer(1) - privkey_der = der.encode_octet_string(b("\x00\xff")) + privkey_der = der.encode_octet_string(b"\x00\xff") curve_oid_der = der.encode_oid(*(1, 2, 3)) const_der = der.encode_constructed(1, curve_oid_der) to_decode = der.encode_sequence( @@ -493,8 +592,8 @@ def test_sk_from_der_invalid_const_tag(self): def test_sk_from_der_garbage_after_privkey_oid(self): ver_der = der.encode_integer(1) - privkey_der = der.encode_octet_string(b("\x00\xff")) - curve_oid_der = der.encode_oid(*(1, 2, 3)) + b("garbage") + privkey_der = der.encode_octet_string(b"\x00\xff") + curve_oid_der = der.encode_oid(*(1, 2, 3)) + b"garbage" const_der = der.encode_constructed(0, curve_oid_der) to_decode = der.encode_sequence( ver_der, privkey_der, const_der, curve_oid_der @@ -505,7 +604,7 @@ def test_sk_from_der_garbage_after_privkey_oid(self): def test_sk_from_der_with_short_privkey(self): ver_der = der.encode_integer(1) - privkey_der = der.encode_octet_string(b("\x00\xff")) + privkey_der = der.encode_octet_string(b"\x00\xff") curve_oid_der = der.encode_oid(*(1, 2, 840, 10045, 3, 1, 1)) const_der = der.encode_constructed(0, curve_oid_der) to_decode = der.encode_sequence( @@ -589,40 +688,42 @@ def test_sign_with_too_long_hash(self): sk = SigningKey.from_secret_exponent(12) with self.assertRaises(BadDigestError): - sk.sign_digest(b("\xff") * 64) + sk.sign_digest(b"\xff" * 64) def test_hashfunc(self): - sk = SigningKey.generate(curve=NIST256p, hashfunc=sha256) - data = b("security level is 128 bits") + sk = SigningKey.generate(curve=NIST256p, hashfunc=hashlib.sha256) + data = b"security level is 128 bits" sig = sk.sign(data) vk = VerifyingKey.from_string( - sk.get_verifying_key().to_string(), curve=NIST256p, hashfunc=sha256 + sk.get_verifying_key().to_string(), + curve=NIST256p, + hashfunc=hashlib.sha256, ) self.assertTrue(vk.verify(sig, data)) sk2 = SigningKey.generate(curve=NIST256p) - sig2 = sk2.sign(data, hashfunc=sha256) + sig2 = sk2.sign(data, hashfunc=hashlib.sha256) vk2 = VerifyingKey.from_string( sk2.get_verifying_key().to_string(), curve=NIST256p, - hashfunc=sha256, + hashfunc=hashlib.sha256, ) self.assertTrue(vk2.verify(sig2, data)) vk3 = VerifyingKey.from_string( sk.get_verifying_key().to_string(), curve=NIST256p ) - self.assertTrue(vk3.verify(sig, data, hashfunc=sha256)) + self.assertTrue(vk3.verify(sig, data, hashfunc=hashlib.sha256)) def test_public_key_recovery(self): # Create keys - curve = NIST256p + curve = BRAINPOOLP160r1 sk = SigningKey.generate(curve=curve) vk = sk.get_verifying_key() # Sign a message - data = b("blahblah") + data = b"blahblah" signature = sk.sign(data) # Recover verifying keys @@ -642,25 +743,29 @@ def test_public_key_recovery(self): ) # Test if original vk is the list of recovered keys - self.assertTrue( - vk.pubkey.point - in [recovered_vk.pubkey.point for recovered_vk in recovered_vks] + self.assertIn( + vk.pubkey.point, + [recovered_vk.pubkey.point for recovered_vk in recovered_vks], ) def test_public_key_recovery_with_custom_hash(self): # Create keys - curve = NIST256p + curve = BRAINPOOLP160r1 - sk = SigningKey.generate(curve=curve, hashfunc=sha256) + sk = SigningKey.generate(curve=curve, hashfunc=hashlib.sha256) vk = sk.get_verifying_key() # Sign a message - data = b("blahblah") + data = b"blahblah" signature = sk.sign(data) # Recover verifying keys recovered_vks = 
VerifyingKey.from_public_key_recovery( - signature, data, curve, hashfunc=sha256 + signature, + data, + curve, + hashfunc=hashlib.sha256, + allow_truncate=True, ) # Test if each pk is valid @@ -670,101 +775,193 @@ def test_public_key_recovery_with_custom_hash(self): # Test if properties are equal self.assertEqual(vk.curve, recovered_vk.curve) - self.assertEqual(sha256, recovered_vk.default_hashfunc) + self.assertEqual(hashlib.sha256, recovered_vk.default_hashfunc) # Test if original vk is the list of recovered keys - self.assertTrue( - vk.pubkey.point - in [recovered_vk.pubkey.point for recovered_vk in recovered_vks] + self.assertIn( + vk.pubkey.point, + [recovered_vk.pubkey.point for recovered_vk in recovered_vks], ) def test_encoding(self): sk = SigningKey.from_secret_exponent(123456789) vk = sk.verifying_key - exp = b( - "\x0c\xe0\x1d\xe0d\x1c\x8eS\x8a\xc0\x9eK\xa8x !\xd5\xc2\xc3" - "\xfd\xc8\xa0c\xff\xfb\x02\xb9\xc4\x84)\x1a\x0f\x8b\x87\xa4" - "z\x8a#\xb5\x97\xecO\xb6\xa0HQ\x89*" + exp = ( + b"\x0c\xe0\x1d\xe0d\x1c\x8eS\x8a\xc0\x9eK\xa8x !\xd5\xc2\xc3" + b"\xfd\xc8\xa0c\xff\xfb\x02\xb9\xc4\x84)\x1a\x0f\x8b\x87\xa4" + b"z\x8a#\xb5\x97\xecO\xb6\xa0HQ\x89*" ) self.assertEqual(vk.to_string(), exp) self.assertEqual(vk.to_string("raw"), exp) - self.assertEqual(vk.to_string("uncompressed"), b("\x04") + exp) - self.assertEqual(vk.to_string("compressed"), b("\x02") + exp[:24]) - self.assertEqual(vk.to_string("hybrid"), b("\x06") + exp) + self.assertEqual(vk.to_string("uncompressed"), b"\x04" + exp) + self.assertEqual(vk.to_string("compressed"), b"\x02" + exp[:24]) + self.assertEqual(vk.to_string("hybrid"), b"\x06" + exp) def test_decoding(self): sk = SigningKey.from_secret_exponent(123456789) vk = sk.verifying_key - enc = b( - "\x0c\xe0\x1d\xe0d\x1c\x8eS\x8a\xc0\x9eK\xa8x !\xd5\xc2\xc3" - "\xfd\xc8\xa0c\xff\xfb\x02\xb9\xc4\x84)\x1a\x0f\x8b\x87\xa4" - "z\x8a#\xb5\x97\xecO\xb6\xa0HQ\x89*" + enc = ( + b"\x0c\xe0\x1d\xe0d\x1c\x8eS\x8a\xc0\x9eK\xa8x !\xd5\xc2\xc3" + b"\xfd\xc8\xa0c\xff\xfb\x02\xb9\xc4\x84)\x1a\x0f\x8b\x87\xa4" + b"z\x8a#\xb5\x97\xecO\xb6\xa0HQ\x89*" ) from_raw = VerifyingKey.from_string(enc) self.assertEqual(from_raw.pubkey.point, vk.pubkey.point) - from_uncompressed = VerifyingKey.from_string(b("\x04") + enc) + from_uncompressed = VerifyingKey.from_string(b"\x04" + enc) self.assertEqual(from_uncompressed.pubkey.point, vk.pubkey.point) - from_compressed = VerifyingKey.from_string(b("\x02") + enc[:24]) + from_compressed = VerifyingKey.from_string(b"\x02" + enc[:24]) self.assertEqual(from_compressed.pubkey.point, vk.pubkey.point) - from_uncompressed = VerifyingKey.from_string(b("\x06") + enc) + from_uncompressed = VerifyingKey.from_string(b"\x06" + enc) self.assertEqual(from_uncompressed.pubkey.point, vk.pubkey.point) + def test_uncompressed_decoding_as_only_alowed(self): + enc = ( + b"\x04" + b"\x0c\xe0\x1d\xe0d\x1c\x8eS\x8a\xc0\x9eK\xa8x !\xd5\xc2\xc3" + b"\xfd\xc8\xa0c\xff\xfb\x02\xb9\xc4\x84)\x1a\x0f\x8b\x87\xa4" + b"z\x8a#\xb5\x97\xecO\xb6\xa0HQ\x89*" + ) + vk = VerifyingKey.from_string(enc, valid_encodings=("uncompressed",)) + sk = SigningKey.from_secret_exponent(123456789) + + self.assertEqual(vk, sk.verifying_key) + + def test_raw_decoding_with_blocked_format(self): + enc = ( + b"\x0c\xe0\x1d\xe0d\x1c\x8eS\x8a\xc0\x9eK\xa8x !\xd5\xc2\xc3" + b"\xfd\xc8\xa0c\xff\xfb\x02\xb9\xc4\x84)\x1a\x0f\x8b\x87\xa4" + b"z\x8a#\xb5\x97\xecO\xb6\xa0HQ\x89*" + ) + with self.assertRaises(MalformedPointError) as exp: + VerifyingKey.from_string(enc, valid_encodings=("hybrid",)) + + 
self.assertIn("hybrid", str(exp.exception)) + + def test_decoding_with_unknown_format(self): + with self.assertRaises(ValueError) as e: + VerifyingKey.from_string(b"", valid_encodings=("raw", "foobar")) + + self.assertIn("Only uncompressed, compressed", str(e.exception)) + + def test_uncompressed_decoding_with_blocked_format(self): + enc = ( + b"\x04" + b"\x0c\xe0\x1d\xe0d\x1c\x8eS\x8a\xc0\x9eK\xa8x !\xd5\xc2\xc3" + b"\xfd\xc8\xa0c\xff\xfb\x02\xb9\xc4\x84)\x1a\x0f\x8b\x87\xa4" + b"z\x8a#\xb5\x97\xecO\xb6\xa0HQ\x89*" + ) + with self.assertRaises(MalformedPointError) as exp: + VerifyingKey.from_string(enc, valid_encodings=("hybrid",)) + + self.assertIn("Invalid X9.62 encoding", str(exp.exception)) + + def test_hybrid_decoding_with_blocked_format(self): + enc = ( + b"\x06" + b"\x0c\xe0\x1d\xe0d\x1c\x8eS\x8a\xc0\x9eK\xa8x !\xd5\xc2\xc3" + b"\xfd\xc8\xa0c\xff\xfb\x02\xb9\xc4\x84)\x1a\x0f\x8b\x87\xa4" + b"z\x8a#\xb5\x97\xecO\xb6\xa0HQ\x89*" + ) + with self.assertRaises(MalformedPointError) as exp: + VerifyingKey.from_string(enc, valid_encodings=("uncompressed",)) + + self.assertIn("Invalid X9.62 encoding", str(exp.exception)) + + def test_hybrid_decoding_with_inconsistent_encoding_and_no_validation( + self, + ): + sk = SigningKey.from_secret_exponent(123456789) + vk = sk.verifying_key + + enc = vk.to_string("hybrid") + self.assertEqual(enc[:1], b"\x06") + enc = b"\x07" + enc[1:] + + b = VerifyingKey.from_string( + enc, valid_encodings=("hybrid",), validate_point=False + ) + + self.assertEqual(vk, b) + + def test_compressed_decoding_with_blocked_format(self): + enc = ( + b"\x02" + b"\x0c\xe0\x1d\xe0d\x1c\x8eS\x8a\xc0\x9eK\xa8x !\xd5\xc2\xc3" + b"\xfd\xc8\xa0c\xff\xfb\x02\xb9\xc4\x84)\x1a\x0f\x8b\x87\xa4" + b"z\x8a#\xb5\x97\xecO\xb6\xa0HQ\x89*" + )[:25] + with self.assertRaises(MalformedPointError) as exp: + VerifyingKey.from_string(enc, valid_encodings=("hybrid", "raw")) + + self.assertIn("(hybrid, raw)", str(exp.exception)) + def test_decoding_with_malformed_uncompressed(self): - enc = b( - "\x0c\xe0\x1d\xe0d\x1c\x8eS\x8a\xc0\x9eK\xa8x !\xd5\xc2\xc3" - "\xfd\xc8\xa0c\xff\xfb\x02\xb9\xc4\x84)\x1a\x0f\x8b\x87\xa4" - "z\x8a#\xb5\x97\xecO\xb6\xa0HQ\x89*" + enc = ( + b"\x0c\xe0\x1d\xe0d\x1c\x8eS\x8a\xc0\x9eK\xa8x !\xd5\xc2\xc3" + b"\xfd\xc8\xa0c\xff\xfb\x02\xb9\xc4\x84)\x1a\x0f\x8b\x87\xa4" + b"z\x8a#\xb5\x97\xecO\xb6\xa0HQ\x89*" ) with self.assertRaises(MalformedPointError): - VerifyingKey.from_string(b("\x02") + enc) + VerifyingKey.from_string(b"\x02" + enc) def test_decoding_with_malformed_compressed(self): - enc = b( - "\x0c\xe0\x1d\xe0d\x1c\x8eS\x8a\xc0\x9eK\xa8x !\xd5\xc2\xc3" - "\xfd\xc8\xa0c\xff\xfb\x02\xb9\xc4\x84)\x1a\x0f\x8b\x87\xa4" - "z\x8a#\xb5\x97\xecO\xb6\xa0HQ\x89*" + enc = ( + b"\x0c\xe0\x1d\xe0d\x1c\x8eS\x8a\xc0\x9eK\xa8x !\xd5\xc2\xc3" + b"\xfd\xc8\xa0c\xff\xfb\x02\xb9\xc4\x84)\x1a\x0f\x8b\x87\xa4" + b"z\x8a#\xb5\x97\xecO\xb6\xa0HQ\x89*" ) with self.assertRaises(MalformedPointError): - VerifyingKey.from_string(b("\x01") + enc[:24]) + VerifyingKey.from_string(b"\x01" + enc[:24]) def test_decoding_with_inconsistent_hybrid(self): - enc = b( - "\x0c\xe0\x1d\xe0d\x1c\x8eS\x8a\xc0\x9eK\xa8x !\xd5\xc2\xc3" - "\xfd\xc8\xa0c\xff\xfb\x02\xb9\xc4\x84)\x1a\x0f\x8b\x87\xa4" - "z\x8a#\xb5\x97\xecO\xb6\xa0HQ\x89*" + enc = ( + b"\x0c\xe0\x1d\xe0d\x1c\x8eS\x8a\xc0\x9eK\xa8x !\xd5\xc2\xc3" + b"\xfd\xc8\xa0c\xff\xfb\x02\xb9\xc4\x84)\x1a\x0f\x8b\x87\xa4" + b"z\x8a#\xb5\x97\xecO\xb6\xa0HQ\x89*" ) with self.assertRaises(MalformedPointError): - VerifyingKey.from_string(b("\x07") + enc) + 
VerifyingKey.from_string(b"\x07" + enc) + + def test_decoding_with_inconsistent_hybrid_odd_point(self): + sk = SigningKey.from_secret_exponent(123456791) + vk = sk.verifying_key + + enc = vk.to_string("hybrid") + self.assertEqual(enc[:1], b"\x07") + enc = b"\x06" + enc[1:] + + with self.assertRaises(MalformedPointError): + b = VerifyingKey.from_string(enc, valid_encodings=("hybrid",)) def test_decoding_with_point_not_on_curve(self): - enc = b( - "\x0c\xe0\x1d\xe0d\x1c\x8eS\x8a\xc0\x9eK\xa8x !\xd5\xc2\xc3" - "\xfd\xc8\xa0c\xff\xfb\x02\xb9\xc4\x84)\x1a\x0f\x8b\x87\xa4" - "z\x8a#\xb5\x97\xecO\xb6\xa0HQ\x89*" + enc = ( + b"\x0c\xe0\x1d\xe0d\x1c\x8eS\x8a\xc0\x9eK\xa8x !\xd5\xc2\xc3" + b"\xfd\xc8\xa0c\xff\xfb\x02\xb9\xc4\x84)\x1a\x0f\x8b\x87\xa4" + b"z\x8a#\xb5\x97\xecO\xb6\xa0HQ\x89*" ) with self.assertRaises(MalformedPointError): - VerifyingKey.from_string(enc[:47] + b("\x00")) + VerifyingKey.from_string(enc[:47] + b"\x00") def test_decoding_with_point_at_infinity(self): # decoding it is unsupported, as it's not necessary to encode it with self.assertRaises(MalformedPointError): - VerifyingKey.from_string(b("\x00")) + VerifyingKey.from_string(b"\x00") def test_not_lying_on_curve(self): enc = number_to_string(NIST192p.curve.p(), NIST192p.curve.p() + 1) with self.assertRaises(MalformedPointError): - VerifyingKey.from_string(b("\x02") + enc) + VerifyingKey.from_string(b"\x02" + enc) def test_from_string_with_invalid_curve_too_short_ver_key_len(self): # both verifying_key_length and baselen are calculated internally @@ -775,7 +972,7 @@ def test_from_string_with_invalid_curve_too_short_ver_key_len(self): curve.baselen = 32 with self.assertRaises(MalformedPointError): - VerifyingKey.from_string(b("\x00") * 16, curve) + VerifyingKey.from_string(b"\x00" * 16, curve) def test_from_string_with_invalid_curve_too_long_ver_key_len(self): # both verifying_key_length and baselen are calculated internally @@ -786,7 +983,7 @@ def test_from_string_with_invalid_curve_too_long_ver_key_len(self): curve.baselen = 16 with self.assertRaises(MalformedPointError): - VerifyingKey.from_string(b("\x00") * 16, curve) + VerifyingKey.from_string(b"\x00" * 16, curve) @pytest.mark.parametrize( @@ -796,9 +993,9 @@ def test_VerifyingKey_decode_with_small_values(val, even): enc = number_to_string(val, NIST192p.order) if even: - enc = b("\x02") + enc + enc = b"\x02" + enc else: - enc = b("\x03") + enc + enc = b"\x03" + enc # small values can both be actual valid public keys and not, verify that # only expected exceptions are raised if they are not @@ -829,6 +1026,24 @@ def test_VerifyingKey_encode_decode(curve, encoding): assert vk.pubkey.point == from_enc.pubkey.point +if "--fast" in sys.argv: # pragma: no cover + params = [NIST192p, BRAINPOOLP160r1] +else: + params = curves + + +@pytest.mark.parametrize("curve", params) +def test_lengths(curve): + priv = SigningKey.generate(curve=curve) + pub1 = priv.get_verifying_key() + pub2 = VerifyingKey.from_string(pub1.to_string(), curve) + assert pub1.to_string() == pub2.to_string() + assert len(pub1.to_string()) == curve.verifying_key_length + sig = priv.sign(b"data") + assert len(sig) == curve.signature_length + + +@pytest.mark.slow class OpenSSL(unittest.TestCase): # test interoperability with OpenSSL tools. 
Note that openssl's ECDSA # sign/verify arguments changed between 0.9.8 and 1.0.0: the early @@ -865,6 +1080,39 @@ def get_openssl_messagedigest_arg(self, hash_name): # vk: 3:OpenSSL->python 4:python->OpenSSL # sig: 5:OpenSSL->python 6:python->OpenSSL + @pytest.mark.slow + @pytest.mark.skipif( + "secp112r1" not in OPENSSL_SUPPORTED_CURVES, + reason="system openssl does not support secp112r1", + ) + def test_from_openssl_secp112r1(self): + return self.do_test_from_openssl(SECP112r1) + + @pytest.mark.slow + @pytest.mark.skipif( + "secp112r2" not in OPENSSL_SUPPORTED_CURVES, + reason="system openssl does not support secp112r2", + ) + def test_from_openssl_secp112r2(self): + return self.do_test_from_openssl(SECP112r2) + + @pytest.mark.slow + @pytest.mark.skipif( + "secp128r1" not in OPENSSL_SUPPORTED_CURVES, + reason="system openssl does not support secp128r1", + ) + def test_from_openssl_secp128r1(self): + return self.do_test_from_openssl(SECP128r1) + + @pytest.mark.slow + @pytest.mark.skipif( + "secp160r1" not in OPENSSL_SUPPORTED_CURVES, + reason="system openssl does not support secp160r1", + ) + def test_from_openssl_secp160r1(self): + return self.do_test_from_openssl(SECP160r1) + + @pytest.mark.slow @pytest.mark.skipif( "prime192v1" not in OPENSSL_SUPPORTED_CURVES, reason="system openssl does not support prime192v1", @@ -872,6 +1120,7 @@ def get_openssl_messagedigest_arg(self, hash_name): def test_from_openssl_nist192p(self): return self.do_test_from_openssl(NIST192p) + @pytest.mark.slow @pytest.mark.skipif( "prime192v1" not in OPENSSL_SUPPORTED_CURVES, reason="system openssl does not support prime192v1", @@ -879,6 +1128,7 @@ def test_from_openssl_nist192p(self): def test_from_openssl_nist192p_sha256(self): return self.do_test_from_openssl(NIST192p, "SHA256") + @pytest.mark.slow @pytest.mark.skipif( "secp224r1" not in OPENSSL_SUPPORTED_CURVES, reason="system openssl does not support secp224r1", @@ -886,6 +1136,7 @@ def test_from_openssl_nist192p_sha256(self): def test_from_openssl_nist224p(self): return self.do_test_from_openssl(NIST224p) + @pytest.mark.slow @pytest.mark.skipif( "prime256v1" not in OPENSSL_SUPPORTED_CURVES, reason="system openssl does not support prime256v1", @@ -893,6 +1144,7 @@ def test_from_openssl_nist224p(self): def test_from_openssl_nist256p(self): return self.do_test_from_openssl(NIST256p) + @pytest.mark.slow @pytest.mark.skipif( "prime256v1" not in OPENSSL_SUPPORTED_CURVES, reason="system openssl does not support prime256v1", @@ -900,6 +1152,7 @@ def test_from_openssl_nist256p(self): def test_from_openssl_nist256p_sha384(self): return self.do_test_from_openssl(NIST256p, "SHA384") + @pytest.mark.slow @pytest.mark.skipif( "prime256v1" not in OPENSSL_SUPPORTED_CURVES, reason="system openssl does not support prime256v1", @@ -907,6 +1160,7 @@ def test_from_openssl_nist256p_sha384(self): def test_from_openssl_nist256p_sha512(self): return self.do_test_from_openssl(NIST256p, "SHA512") + @pytest.mark.slow @pytest.mark.skipif( "secp384r1" not in OPENSSL_SUPPORTED_CURVES, reason="system openssl does not support secp384r1", @@ -914,6 +1168,7 @@ def test_from_openssl_nist256p_sha512(self): def test_from_openssl_nist384p(self): return self.do_test_from_openssl(NIST384p) + @pytest.mark.slow @pytest.mark.skipif( "secp521r1" not in OPENSSL_SUPPORTED_CURVES, reason="system openssl does not support secp521r1", @@ -921,6 +1176,7 @@ def test_from_openssl_nist384p(self): def test_from_openssl_nist521p(self): return self.do_test_from_openssl(NIST521p) + @pytest.mark.slow 
@pytest.mark.skipif( "secp256k1" not in OPENSSL_SUPPORTED_CURVES, reason="system openssl does not support secp256k1", @@ -928,6 +1184,7 @@ def test_from_openssl_nist521p(self): def test_from_openssl_secp256k1(self): return self.do_test_from_openssl(SECP256k1) + @pytest.mark.slow @pytest.mark.skipif( "brainpoolP160r1" not in OPENSSL_SUPPORTED_CURVES, reason="system openssl does not support brainpoolP160r1", @@ -935,6 +1192,7 @@ def test_from_openssl_secp256k1(self): def test_from_openssl_brainpoolp160r1(self): return self.do_test_from_openssl(BRAINPOOLP160r1) + @pytest.mark.slow @pytest.mark.skipif( "brainpoolP192r1" not in OPENSSL_SUPPORTED_CURVES, reason="system openssl does not support brainpoolP192r1", @@ -942,6 +1200,7 @@ def test_from_openssl_brainpoolp160r1(self): def test_from_openssl_brainpoolp192r1(self): return self.do_test_from_openssl(BRAINPOOLP192r1) + @pytest.mark.slow @pytest.mark.skipif( "brainpoolP224r1" not in OPENSSL_SUPPORTED_CURVES, reason="system openssl does not support brainpoolP224r1", @@ -949,6 +1208,7 @@ def test_from_openssl_brainpoolp192r1(self): def test_from_openssl_brainpoolp224r1(self): return self.do_test_from_openssl(BRAINPOOLP224r1) + @pytest.mark.slow @pytest.mark.skipif( "brainpoolP256r1" not in OPENSSL_SUPPORTED_CURVES, reason="system openssl does not support brainpoolP256r1", @@ -956,6 +1216,7 @@ def test_from_openssl_brainpoolp224r1(self): def test_from_openssl_brainpoolp256r1(self): return self.do_test_from_openssl(BRAINPOOLP256r1) + @pytest.mark.slow @pytest.mark.skipif( "brainpoolP320r1" not in OPENSSL_SUPPORTED_CURVES, reason="system openssl does not support brainpoolP320r1", @@ -963,6 +1224,7 @@ def test_from_openssl_brainpoolp256r1(self): def test_from_openssl_brainpoolp320r1(self): return self.do_test_from_openssl(BRAINPOOLP320r1) + @pytest.mark.slow @pytest.mark.skipif( "brainpoolP384r1" not in OPENSSL_SUPPORTED_CURVES, reason="system openssl does not support brainpoolP384r1", @@ -970,6 +1232,7 @@ def test_from_openssl_brainpoolp320r1(self): def test_from_openssl_brainpoolp384r1(self): return self.do_test_from_openssl(BRAINPOOLP384r1) + @pytest.mark.slow @pytest.mark.skipif( "brainpoolP512r1" not in OPENSSL_SUPPORTED_CURVES, reason="system openssl does not support brainpoolP512r1", @@ -977,6 +1240,62 @@ def test_from_openssl_brainpoolp384r1(self): def test_from_openssl_brainpoolp512r1(self): return self.do_test_from_openssl(BRAINPOOLP512r1) + @pytest.mark.slow + @pytest.mark.skipif( + "brainpoolP160t1" not in OPENSSL_SUPPORTED_CURVES, + reason="system openssl does not support brainpoolP160t1", + ) + def test_from_openssl_brainpoolp160t1(self): + return self.do_test_from_openssl(BRAINPOOLP160t1) + + @pytest.mark.slow + @pytest.mark.skipif( + "brainpoolP192t1" not in OPENSSL_SUPPORTED_CURVES, + reason="system openssl does not support brainpoolP192t1", + ) + def test_from_openssl_brainpoolp192t1(self): + return self.do_test_from_openssl(BRAINPOOLP192t1) + + @pytest.mark.slow + @pytest.mark.skipif( + "brainpoolP224t1" not in OPENSSL_SUPPORTED_CURVES, + reason="system openssl does not support brainpoolP224t1", + ) + def test_from_openssl_brainpoolp224t1(self): + return self.do_test_from_openssl(BRAINPOOLP224t1) + + @pytest.mark.slow + @pytest.mark.skipif( + "brainpoolP256t1" not in OPENSSL_SUPPORTED_CURVES, + reason="system openssl does not support brainpoolP256t1", + ) + def test_from_openssl_brainpoolp256t1(self): + return self.do_test_from_openssl(BRAINPOOLP256t1) + + @pytest.mark.slow + @pytest.mark.skipif( + "brainpoolP320t1" not in 
OPENSSL_SUPPORTED_CURVES, + reason="system openssl does not support brainpoolP320t1", + ) + def test_from_openssl_brainpoolp320t1(self): + return self.do_test_from_openssl(BRAINPOOLP320t1) + + @pytest.mark.slow + @pytest.mark.skipif( + "brainpoolP384t1" not in OPENSSL_SUPPORTED_CURVES, + reason="system openssl does not support brainpoolP384t1", + ) + def test_from_openssl_brainpoolp384t1(self): + return self.do_test_from_openssl(BRAINPOOLP384t1) + + @pytest.mark.slow + @pytest.mark.skipif( + "brainpoolP512t1" not in OPENSSL_SUPPORTED_CURVES, + reason="system openssl does not support brainpoolP512t1", + ) + def test_from_openssl_brainpoolp512t1(self): + return self.do_test_from_openssl(BRAINPOOLP512t1) + def do_test_from_openssl(self, curve, hash_name="SHA1"): curvename = curve.openssl_name assert curvename @@ -988,7 +1307,7 @@ def do_test_from_openssl(self, curve, hash_name="SHA1"): os.mkdir("t") run_openssl("ecparam -name %s -genkey -out t/privkey.pem" % curvename) run_openssl("ec -in t/privkey.pem -pubout -out t/pubkey.pem") - data = b("data") + data = b"data" with open("t/data.txt", "wb") as e: e.write(data) run_openssl( @@ -1029,6 +1348,39 @@ def do_test_from_openssl(self, curve, hash_name="SHA1"): sk_from_p8 = SigningKey.from_pem(privkey_p8_pem) self.assertEqual(sk, sk_from_p8) + @pytest.mark.slow + @pytest.mark.skipif( + "secp112r1" not in OPENSSL_SUPPORTED_CURVES, + reason="system openssl does not support secp112r1", + ) + def test_to_openssl_secp112r1(self): + self.do_test_to_openssl(SECP112r1) + + @pytest.mark.slow + @pytest.mark.skipif( + "secp112r2" not in OPENSSL_SUPPORTED_CURVES, + reason="system openssl does not support secp112r2", + ) + def test_to_openssl_secp112r2(self): + self.do_test_to_openssl(SECP112r2) + + @pytest.mark.slow + @pytest.mark.skipif( + "secp128r1" not in OPENSSL_SUPPORTED_CURVES, + reason="system openssl does not support secp128r1", + ) + def test_to_openssl_secp128r1(self): + self.do_test_to_openssl(SECP128r1) + + @pytest.mark.slow + @pytest.mark.skipif( + "secp160r1" not in OPENSSL_SUPPORTED_CURVES, + reason="system openssl does not support secp160r1", + ) + def test_to_openssl_secp160r1(self): + self.do_test_to_openssl(SECP160r1) + + @pytest.mark.slow @pytest.mark.skipif( "prime192v1" not in OPENSSL_SUPPORTED_CURVES, reason="system openssl does not support prime192v1", @@ -1036,6 +1388,7 @@ def do_test_from_openssl(self, curve, hash_name="SHA1"): def test_to_openssl_nist192p(self): self.do_test_to_openssl(NIST192p) + @pytest.mark.slow @pytest.mark.skipif( "prime192v1" not in OPENSSL_SUPPORTED_CURVES, reason="system openssl does not support prime192v1", @@ -1043,6 +1396,7 @@ def test_to_openssl_nist192p(self): def test_to_openssl_nist192p_sha256(self): self.do_test_to_openssl(NIST192p, "SHA256") + @pytest.mark.slow @pytest.mark.skipif( "secp224r1" not in OPENSSL_SUPPORTED_CURVES, reason="system openssl does not support secp224r1", @@ -1050,6 +1404,7 @@ def test_to_openssl_nist192p_sha256(self): def test_to_openssl_nist224p(self): self.do_test_to_openssl(NIST224p) + @pytest.mark.slow @pytest.mark.skipif( "prime256v1" not in OPENSSL_SUPPORTED_CURVES, reason="system openssl does not support prime256v1", @@ -1057,6 +1412,7 @@ def test_to_openssl_nist224p(self): def test_to_openssl_nist256p(self): self.do_test_to_openssl(NIST256p) + @pytest.mark.slow @pytest.mark.skipif( "prime256v1" not in OPENSSL_SUPPORTED_CURVES, reason="system openssl does not support prime256v1", @@ -1064,6 +1420,7 @@ def test_to_openssl_nist256p(self): def 
test_to_openssl_nist256p_sha384(self): self.do_test_to_openssl(NIST256p, "SHA384") + @pytest.mark.slow @pytest.mark.skipif( "prime256v1" not in OPENSSL_SUPPORTED_CURVES, reason="system openssl does not support prime256v1", @@ -1071,6 +1428,7 @@ def test_to_openssl_nist256p_sha384(self): def test_to_openssl_nist256p_sha512(self): self.do_test_to_openssl(NIST256p, "SHA512") + @pytest.mark.slow @pytest.mark.skipif( "secp384r1" not in OPENSSL_SUPPORTED_CURVES, reason="system openssl does not support secp384r1", @@ -1078,6 +1436,7 @@ def test_to_openssl_nist256p_sha512(self): def test_to_openssl_nist384p(self): self.do_test_to_openssl(NIST384p) + @pytest.mark.slow @pytest.mark.skipif( "secp521r1" not in OPENSSL_SUPPORTED_CURVES, reason="system openssl does not support secp521r1", @@ -1085,6 +1444,7 @@ def test_to_openssl_nist384p(self): def test_to_openssl_nist521p(self): self.do_test_to_openssl(NIST521p) + @pytest.mark.slow @pytest.mark.skipif( "secp256k1" not in OPENSSL_SUPPORTED_CURVES, reason="system openssl does not support secp256k1", @@ -1092,6 +1452,7 @@ def test_to_openssl_nist521p(self): def test_to_openssl_secp256k1(self): self.do_test_to_openssl(SECP256k1) + @pytest.mark.slow @pytest.mark.skipif( "brainpoolP160r1" not in OPENSSL_SUPPORTED_CURVES, reason="system openssl does not support brainpoolP160r1", @@ -1099,6 +1460,7 @@ def test_to_openssl_secp256k1(self): def test_to_openssl_brainpoolp160r1(self): self.do_test_to_openssl(BRAINPOOLP160r1) + @pytest.mark.slow @pytest.mark.skipif( "brainpoolP192r1" not in OPENSSL_SUPPORTED_CURVES, reason="system openssl does not support brainpoolP192r1", @@ -1106,6 +1468,7 @@ def test_to_openssl_brainpoolp160r1(self): def test_to_openssl_brainpoolp192r1(self): self.do_test_to_openssl(BRAINPOOLP192r1) + @pytest.mark.slow @pytest.mark.skipif( "brainpoolP224r1" not in OPENSSL_SUPPORTED_CURVES, reason="system openssl does not support brainpoolP224r1", @@ -1113,6 +1476,7 @@ def test_to_openssl_brainpoolp192r1(self): def test_to_openssl_brainpoolp224r1(self): self.do_test_to_openssl(BRAINPOOLP224r1) + @pytest.mark.slow @pytest.mark.skipif( "brainpoolP256r1" not in OPENSSL_SUPPORTED_CURVES, reason="system openssl does not support brainpoolP256r1", @@ -1120,6 +1484,7 @@ def test_to_openssl_brainpoolp224r1(self): def test_to_openssl_brainpoolp256r1(self): self.do_test_to_openssl(BRAINPOOLP256r1) + @pytest.mark.slow @pytest.mark.skipif( "brainpoolP320r1" not in OPENSSL_SUPPORTED_CURVES, reason="system openssl does not support brainpoolP320r1", @@ -1127,6 +1492,7 @@ def test_to_openssl_brainpoolp256r1(self): def test_to_openssl_brainpoolp320r1(self): self.do_test_to_openssl(BRAINPOOLP320r1) + @pytest.mark.slow @pytest.mark.skipif( "brainpoolP384r1" not in OPENSSL_SUPPORTED_CURVES, reason="system openssl does not support brainpoolP384r1", @@ -1134,6 +1500,7 @@ def test_to_openssl_brainpoolp320r1(self): def test_to_openssl_brainpoolp384r1(self): self.do_test_to_openssl(BRAINPOOLP384r1) + @pytest.mark.slow @pytest.mark.skipif( "brainpoolP512r1" not in OPENSSL_SUPPORTED_CURVES, reason="system openssl does not support brainpoolP512r1", @@ -1141,6 +1508,62 @@ def test_to_openssl_brainpoolp384r1(self): def test_to_openssl_brainpoolp512r1(self): self.do_test_to_openssl(BRAINPOOLP512r1) + @pytest.mark.slow + @pytest.mark.skipif( + "brainpoolP160t1" not in OPENSSL_SUPPORTED_CURVES, + reason="system openssl does not support brainpoolP160t1", + ) + def test_to_openssl_brainpoolp160t1(self): + self.do_test_to_openssl(BRAINPOOLP160t1) + + @pytest.mark.slow + 
@pytest.mark.skipif( + "brainpoolP192t1" not in OPENSSL_SUPPORTED_CURVES, + reason="system openssl does not support brainpoolP192t1", + ) + def test_to_openssl_brainpoolp192t1(self): + self.do_test_to_openssl(BRAINPOOLP192t1) + + @pytest.mark.slow + @pytest.mark.skipif( + "brainpoolP224t1" not in OPENSSL_SUPPORTED_CURVES, + reason="system openssl does not support brainpoolP224t1", + ) + def test_to_openssl_brainpoolp224t1(self): + self.do_test_to_openssl(BRAINPOOLP224t1) + + @pytest.mark.slow + @pytest.mark.skipif( + "brainpoolP256t1" not in OPENSSL_SUPPORTED_CURVES, + reason="system openssl does not support brainpoolP256t1", + ) + def test_to_openssl_brainpoolp256t1(self): + self.do_test_to_openssl(BRAINPOOLP256t1) + + @pytest.mark.slow + @pytest.mark.skipif( + "brainpoolP320t1" not in OPENSSL_SUPPORTED_CURVES, + reason="system openssl does not support brainpoolP320t1", + ) + def test_to_openssl_brainpoolp320t1(self): + self.do_test_to_openssl(BRAINPOOLP320t1) + + @pytest.mark.slow + @pytest.mark.skipif( + "brainpoolP384t1" not in OPENSSL_SUPPORTED_CURVES, + reason="system openssl does not support brainpoolP384t1", + ) + def test_to_openssl_brainpoolp384t1(self): + self.do_test_to_openssl(BRAINPOOLP384t1) + + @pytest.mark.slow + @pytest.mark.skipif( + "brainpoolP512t1" not in OPENSSL_SUPPORTED_CURVES, + reason="system openssl does not support brainpoolP512t1", + ) + def test_to_openssl_brainpoolp512t1(self): + self.do_test_to_openssl(BRAINPOOLP512t1) + def do_test_to_openssl(self, curve, hash_name="SHA1"): curvename = curve.openssl_name assert curvename @@ -1152,7 +1575,7 @@ def do_test_to_openssl(self, curve, hash_name="SHA1"): os.mkdir("t") sk = SigningKey.generate(curve=curve) vk = sk.get_verifying_key() - data = b("data") + data = b"data" with open("t/pubkey.der", "wb") as e: e.write(vk.to_der()) # 4 with open("t/pubkey.pem", "wb") as e: @@ -1168,7 +1591,7 @@ def do_test_to_openssl(self, curve, hash_name="SHA1"): with open("t/data.txt", "wb") as e: e.write(data) with open("t/baddata.txt", "wb") as e: - e.write(data + b("corrupt")) + e.write(data + b"corrupt") self.assertRaises( SubprocessError, @@ -1191,6 +1614,17 @@ def do_test_to_openssl(self, curve, hash_name="SHA1"): % mdarg ) + with open("t/privkey-explicit.pem", "wb") as e: + e.write(sk.to_pem(curve_parameters_encoding="explicit")) + run_openssl( + "dgst %s -sign t/privkey-explicit.pem -out t/data.sig2 t/data.txt" + % mdarg + ) + run_openssl( + "dgst %s -verify t/pubkey.pem -signature t/data.sig2 t/data.txt" + % mdarg + ) + with open("t/privkey-p8.pem", "wb") as e: e.write(sk.to_pem(format="pkcs8")) run_openssl( @@ -1202,6 +1636,135 @@ def do_test_to_openssl(self, curve, hash_name="SHA1"): % mdarg ) + with open("t/privkey-p8-explicit.pem", "wb") as e: + e.write( + sk.to_pem(format="pkcs8", curve_parameters_encoding="explicit") + ) + run_openssl( + "dgst %s -sign t/privkey-p8-explicit.pem -out t/data.sig3 t/data.txt" + % mdarg + ) + run_openssl( + "dgst %s -verify t/pubkey.pem -signature t/data.sig3 t/data.txt" + % mdarg + ) + + OPENSSL_SUPPORTED_TYPES = set() + try: + if "-rawin" in run_openssl("pkeyutl -help"): + OPENSSL_SUPPORTED_TYPES = set( # pragma: no branch + c.lower() + for c in ("ED25519", "ED448") + if c in run_openssl("list -public-key-methods") + ) + except SubprocessError: # pragma: no cover + pass + + def do_eddsa_test_to_openssl(self, curve): + if os.path.isdir("t"): + shutil.rmtree("t") + os.mkdir("t") + + sk = SigningKey.generate(curve=curve) + vk = sk.get_verifying_key() + + data = b"data" + with 
open("t/pubkey.der", "wb") as e: + e.write(vk.to_der()) + with open("t/pubkey.pem", "wb") as e: + e.write(vk.to_pem()) + + sig = sk.sign(data) + + with open("t/data.sig", "wb") as e: + e.write(sig) + with open("t/data.txt", "wb") as e: + e.write(data) + with open("t/baddata.txt", "wb") as e: + e.write(data + b"corrupt") + + with self.assertRaises(SubprocessError): + run_openssl( + "pkeyutl -verify -pubin -inkey t/pubkey.pem -rawin " + "-in t/baddata.txt -sigfile t/data.sig" + ) + run_openssl( + "pkeyutl -verify -pubin -inkey t/pubkey.pem -rawin " + "-in t/data.txt -sigfile t/data.sig" + ) + + shutil.rmtree("t") + + # in practice at least OpenSSL 3.0.0 is needed to make EdDSA signatures + # earlier versions support EdDSA only in X.509 certificates + @pytest.mark.slow + @pytest.mark.skipif( + "ed25519" not in OPENSSL_SUPPORTED_TYPES, + reason="system openssl does not support signing with Ed25519", + ) + def test_to_openssl_ed25519(self): + return self.do_eddsa_test_to_openssl(Ed25519) + + @pytest.mark.slow + @pytest.mark.skipif( + "ed448" not in OPENSSL_SUPPORTED_TYPES, + reason="system openssl does not support signing with Ed448", + ) + def test_to_openssl_ed448(self): + return self.do_eddsa_test_to_openssl(Ed448) + + def do_eddsa_test_from_openssl(self, curve): + curvename = curve.name + + if os.path.isdir("t"): + shutil.rmtree("t") + os.mkdir("t") + + data = b"data" + + run_openssl( + "genpkey -algorithm {0} -outform PEM -out t/privkey.pem".format( + curvename + ) + ) + run_openssl( + "pkey -outform PEM -pubout -in t/privkey.pem -out t/pubkey.pem" + ) + + with open("t/data.txt", "wb") as e: + e.write(data) + run_openssl( + "pkeyutl -sign -inkey t/privkey.pem " + "-rawin -in t/data.txt -out t/data.sig" + ) + + with open("t/data.sig", "rb") as e: + sig = e.read() + with open("t/pubkey.pem", "rb") as e: + vk = VerifyingKey.from_pem(e.read()) + + self.assertIs(vk.curve, curve) + + vk.verify(sig, data) + + shutil.rmtree("t") + + @pytest.mark.slow + @pytest.mark.skipif( + "ed25519" not in OPENSSL_SUPPORTED_TYPES, + reason="system openssl does not support signing with Ed25519", + ) + def test_from_openssl_ed25519(self): + return self.do_eddsa_test_from_openssl(Ed25519) + + @pytest.mark.slow + @pytest.mark.skipif( + "ed448" not in OPENSSL_SUPPORTED_TYPES, + reason="system openssl does not support signing with Ed448", + ) + def test_from_openssl_ed448(self): + return self.do_eddsa_test_from_openssl(Ed448) + class TooSmallCurve(unittest.TestCase): OPENSSL_SUPPORTED_CURVES = set( @@ -1215,8 +1778,7 @@ class TooSmallCurve(unittest.TestCase): ) def test_sign_too_small_curve_dont_allow_truncate_raises(self): sk = SigningKey.generate(curve=NIST192p) - vk = sk.get_verifying_key() - data = b("data") + data = b"data" with self.assertRaises(BadDigestError): sk.sign( data, @@ -1232,7 +1794,7 @@ def test_sign_too_small_curve_dont_allow_truncate_raises(self): def test_verify_too_small_curve_dont_allow_truncate_raises(self): sk = SigningKey.generate(curve=NIST192p) vk = sk.get_verifying_key() - data = b("data") + data = b"data" sig_der = sk.sign( data, hashfunc=partial(hashlib.new, "SHA256"), @@ -1251,108 +1813,121 @@ def test_verify_too_small_curve_dont_allow_truncate_raises(self): class DER(unittest.TestCase): def test_integer(self): - self.assertEqual(der.encode_integer(0), b("\x02\x01\x00")) - self.assertEqual(der.encode_integer(1), b("\x02\x01\x01")) - self.assertEqual(der.encode_integer(127), b("\x02\x01\x7f")) - self.assertEqual(der.encode_integer(128), b("\x02\x02\x00\x80")) - 
self.assertEqual(der.encode_integer(256), b("\x02\x02\x01\x00")) - # self.assertEqual(der.encode_integer(-1), b("\x02\x01\xff")) + self.assertEqual(der.encode_integer(0), b"\x02\x01\x00") + self.assertEqual(der.encode_integer(1), b"\x02\x01\x01") + self.assertEqual(der.encode_integer(127), b"\x02\x01\x7f") + self.assertEqual(der.encode_integer(128), b"\x02\x02\x00\x80") + self.assertEqual(der.encode_integer(256), b"\x02\x02\x01\x00") + # self.assertEqual(der.encode_integer(-1), b"\x02\x01\xff") def s(n): - return der.remove_integer(der.encode_integer(n) + b("junk")) + return der.remove_integer(der.encode_integer(n) + b"junk") - self.assertEqual(s(0), (0, b("junk"))) - self.assertEqual(s(1), (1, b("junk"))) - self.assertEqual(s(127), (127, b("junk"))) - self.assertEqual(s(128), (128, b("junk"))) - self.assertEqual(s(256), (256, b("junk"))) + self.assertEqual(s(0), (0, b"junk")) + self.assertEqual(s(1), (1, b"junk")) + self.assertEqual(s(127), (127, b"junk")) + self.assertEqual(s(128), (128, b"junk")) + self.assertEqual(s(256), (256, b"junk")) self.assertEqual( s(1234567890123456789012345678901234567890), - (1234567890123456789012345678901234567890, b("junk")), + (1234567890123456789012345678901234567890, b"junk"), ) def test_number(self): - self.assertEqual(der.encode_number(0), b("\x00")) - self.assertEqual(der.encode_number(127), b("\x7f")) - self.assertEqual(der.encode_number(128), b("\x81\x00")) - self.assertEqual(der.encode_number(3 * 128 + 7), b("\x83\x07")) + self.assertEqual(der.encode_number(0), b"\x00") + self.assertEqual(der.encode_number(127), b"\x7f") + self.assertEqual(der.encode_number(128), b"\x81\x00") + self.assertEqual(der.encode_number(3 * 128 + 7), b"\x83\x07") # self.assertEqual(der.read_number("\x81\x9b" + "more"), (155, 2)) - # self.assertEqual(der.encode_number(155), b("\x81\x9b")) + # self.assertEqual(der.encode_number(155), b"\x81\x9b") for n in (0, 1, 2, 127, 128, 3 * 128 + 7, 840, 10045): # , 155): - x = der.encode_number(n) + b("more") + x = der.encode_number(n) + b"more" n1, llen = der.read_number(x) self.assertEqual(n1, n) - self.assertEqual(x[llen:], b("more")) + self.assertEqual(x[llen:], b"more") def test_length(self): - self.assertEqual(der.encode_length(0), b("\x00")) - self.assertEqual(der.encode_length(127), b("\x7f")) - self.assertEqual(der.encode_length(128), b("\x81\x80")) - self.assertEqual(der.encode_length(255), b("\x81\xff")) - self.assertEqual(der.encode_length(256), b("\x82\x01\x00")) - self.assertEqual(der.encode_length(3 * 256 + 7), b("\x82\x03\x07")) - self.assertEqual(der.read_length(b("\x81\x9b") + b("more")), (155, 2)) - self.assertEqual(der.encode_length(155), b("\x81\x9b")) + self.assertEqual(der.encode_length(0), b"\x00") + self.assertEqual(der.encode_length(127), b"\x7f") + self.assertEqual(der.encode_length(128), b"\x81\x80") + self.assertEqual(der.encode_length(255), b"\x81\xff") + self.assertEqual(der.encode_length(256), b"\x82\x01\x00") + self.assertEqual(der.encode_length(3 * 256 + 7), b"\x82\x03\x07") + self.assertEqual(der.read_length(b"\x81\x9b" + b"more"), (155, 2)) + self.assertEqual(der.encode_length(155), b"\x81\x9b") for n in (0, 1, 2, 127, 128, 255, 256, 3 * 256 + 7, 155): - x = der.encode_length(n) + b("more") + x = der.encode_length(n) + b"more" n1, llen = der.read_length(x) self.assertEqual(n1, n) - self.assertEqual(x[llen:], b("more")) + self.assertEqual(x[llen:], b"more") def test_sequence(self): - x = der.encode_sequence(b("ABC"), b("DEF")) + b("GHI") - self.assertEqual(x, b("\x30\x06ABCDEFGHI")) + x = 
der.encode_sequence(b"ABC", b"DEF") + b"GHI" + self.assertEqual(x, b"\x30\x06ABCDEFGHI") x1, rest = der.remove_sequence(x) - self.assertEqual(x1, b("ABCDEF")) - self.assertEqual(rest, b("GHI")) + self.assertEqual(x1, b"ABCDEF") + self.assertEqual(rest, b"GHI") def test_constructed(self): x = der.encode_constructed(0, NIST224p.encoded_oid) - self.assertEqual(hexlify(x), b("a007") + b("06052b81040021")) - x = der.encode_constructed(1, unhexlify(b("0102030a0b0c"))) - self.assertEqual(hexlify(x), b("a106") + b("0102030a0b0c")) + self.assertEqual(hexlify(x), b"a007" + b"06052b81040021") + x = der.encode_constructed(1, unhexlify(b"0102030a0b0c")) + self.assertEqual(hexlify(x), b"a106" + b"0102030a0b0c") class Util(unittest.TestCase): + @pytest.mark.slow def test_trytryagain(self): tta = util.randrange_from_seed__trytryagain for i in range(1000): seed = "seed-%d" % i for order in ( - 2 ** 8 - 2, - 2 ** 8 - 1, - 2 ** 8, - 2 ** 8 + 1, - 2 ** 8 + 2, - 2 ** 16 - 1, - 2 ** 16 + 1, + 2**8 - 2, + 2**8 - 1, + 2**8, + 2**8 + 1, + 2**8 + 2, + 2**16 - 1, + 2**16 + 1, ): n = tta(seed, order) self.assertTrue(1 <= n < order, (1, n, order)) # this trytryagain *does* provide long-term stability self.assertEqual( ("%x" % (tta("seed", NIST224p.order))).encode(), - b("6fa59d73bf0446ae8743cf748fc5ac11d5585a90356417e97155c3bc"), + b"6fa59d73bf0446ae8743cf748fc5ac11d5585a90356417e97155c3bc", ) - @given(st.integers(min_value=0, max_value=10 ** 200)) + def test_trytryagain_single(self): + tta = util.randrange_from_seed__trytryagain + order = 2**8 - 2 + seed = b"text" + n = tta(seed, order) + # known issue: https://github.com/warner/python-ecdsa/issues/221 + if sys.version_info < (3, 0): # pragma: no branch + self.assertEqual(n, 228) + else: # pragma: no branch + self.assertEqual(n, 18) + + @settings(**HYP_SETTINGS) + @given(st.integers(min_value=0, max_value=10**200)) def test_randrange(self, i): # util.randrange does not provide long-term stability: we might # change the algorithm in the future. 
entropy = util.PRNG("seed-%d" % i) for order in ( - 2 ** 8 - 2, - 2 ** 8 - 1, - 2 ** 8, - 2 ** 16 - 1, - 2 ** 16 + 1, + 2**8 - 2, + 2**8 - 1, + 2**8, + 2**16 - 1, + 2**16 + 1, ): # that oddball 2**16+1 takes half our runtime n = util.randrange(order, entropy=entropy) self.assertTrue(1 <= n < order, (1, n, order)) def OFF_test_prove_uniformity(self): # pragma: no cover - order = 2 ** 8 - 2 + order = 2**8 - 2 counts = dict([(i, 0) for i in range(1, order)]) assert 0 not in counts assert order not in counts @@ -1363,7 +1938,7 @@ def OFF_test_prove_uniformity(self): # pragma: no cover # this technique should use the full range self.assertTrue(counts[order - 1]) for i in range(1, order): - print_("%3d: %s" % (i, "*" * (counts[i] // 100))) + print("%3d: %s" % (i, "*" * (counts[i] // 100))) class RFC6979(unittest.TestCase): @@ -1378,8 +1953,8 @@ def test_SECP256k1(self): self._do( generator=SECP256k1.generator, secexp=int("9d0219792467d7d37b4d43298a7d0c05", 16), - hsh=sha256(b("sample")).digest(), - hash_func=sha256, + hsh=hashlib.sha256(b"sample").digest(), + hash_func=hashlib.sha256, expected=int( "8fa1f95d514760e498f28957b824ee6ec39ed64826ff4fecc2b5739ec45b91cd", 16, @@ -1393,8 +1968,8 @@ def test_SECP256k1_2(self): "cca9fbcc1b41e5a95d369eaa6ddcff73b61a4efaa279cfc6567e8daa39cbaf50", 16, ), - hsh=sha256(b("sample")).digest(), - hash_func=sha256, + hsh=hashlib.sha256(b"sample").digest(), + hash_func=hashlib.sha256, expected=int( "2df40ca70e639d89528a6b670d9d48d9165fdc0febc0974056bdce192b8e16a3", 16, @@ -1405,8 +1980,8 @@ def test_SECP256k1_3(self): self._do( generator=SECP256k1.generator, secexp=0x1, - hsh=sha256(b("Satoshi Nakamoto")).digest(), - hash_func=sha256, + hsh=hashlib.sha256(b"Satoshi Nakamoto").digest(), + hash_func=hashlib.sha256, expected=0x8F8A276C19F4149656B280621E358CCE24F5F52542772691EE69063B74F15D15, ) @@ -1414,12 +1989,10 @@ def test_SECP256k1_4(self): self._do( generator=SECP256k1.generator, secexp=0x1, - hsh=sha256( - b( - "All those moments will be lost in time, like tears in rain. Time to die..." - ) + hsh=hashlib.sha256( + b"All those moments will be lost in time, like tears in rain. Time to die..." 
).digest(), - hash_func=sha256, + hash_func=hashlib.sha256, expected=0x38AA22D72376B4DBC472E06C3BA403EE0A394DA63FC58D88686C611ABA98D6B3, ) @@ -1427,8 +2000,8 @@ def test_SECP256k1_5(self): self._do( generator=SECP256k1.generator, secexp=0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEBAAEDCE6AF48A03BBFD25E8CD0364140, - hsh=sha256(b("Satoshi Nakamoto")).digest(), - hash_func=sha256, + hsh=hashlib.sha256(b"Satoshi Nakamoto").digest(), + hash_func=hashlib.sha256, expected=0x33A19B60E25FB6F4435AF53A3D42D493644827367E6453928554F43E49AA6F90, ) @@ -1436,8 +2009,8 @@ def test_SECP256k1_6(self): self._do( generator=SECP256k1.generator, secexp=0xF8B8AF8CE3C7CCA5E300D33939540C10D45CE001B8F252BFBC57BA0342904181, - hsh=sha256(b("Alan Turing")).digest(), - hash_func=sha256, + hsh=hashlib.sha256(b"Alan Turing").digest(), + hash_func=hashlib.sha256, expected=0x525A82B70E67874398067543FD84C83D30C175FDC45FDEEE082FE13B1D7CFDF1, ) @@ -1452,11 +2025,9 @@ def test_1(self): ), secexp=int("09A4D6792295A7F730FC3F2B49CBC0F62E862272F", 16), hsh=unhexlify( - b( - "AF2BDBE1AA9B6EC1E2ADE1D694F41FC71A831D0268E9891562113D8A62ADD1BF" - ) + b"AF2BDBE1AA9B6EC1E2ADE1D694F41FC71A831D0268E9891562113D8A62ADD1BF" ), - hash_func=sha256, + hash_func=hashlib.sha256, expected=int("23AF4074C90A02B3FE61D286D5C87F425E6BDD81B", 16), ) @@ -1464,8 +2035,8 @@ def test_2(self): self._do( generator=NIST192p.generator, secexp=int("6FAB034934E4C0FC9AE67F5B5659A9D7D1FEFD187EE09FD4", 16), - hsh=sha1(b("sample")).digest(), - hash_func=sha1, + hsh=hashlib.sha1(b"sample").digest(), + hash_func=hashlib.sha1, expected=int( "37D7CA00D2C7B0E5E412AC03BD44BA837FDD5B28CD3B0021", 16 ), @@ -1475,8 +2046,8 @@ def test_3(self): self._do( generator=NIST192p.generator, secexp=int("6FAB034934E4C0FC9AE67F5B5659A9D7D1FEFD187EE09FD4", 16), - hsh=sha256(b("sample")).digest(), - hash_func=sha256, + hsh=hashlib.sha256(b"sample").digest(), + hash_func=hashlib.sha256, expected=int( "32B1B6D7D42A05CB449065727A84804FB1A3E34D8F261496", 16 ), @@ -1486,8 +2057,8 @@ def test_4(self): self._do( generator=NIST192p.generator, secexp=int("6FAB034934E4C0FC9AE67F5B5659A9D7D1FEFD187EE09FD4", 16), - hsh=sha512(b("sample")).digest(), - hash_func=sha512, + hsh=hashlib.sha512(b"sample").digest(), + hash_func=hashlib.sha512, expected=int( "A2AC7AB055E4F20692D49209544C203A7D1F2C0BFBC75DB1", 16 ), @@ -1497,8 +2068,8 @@ def test_5(self): self._do( generator=NIST192p.generator, secexp=int("6FAB034934E4C0FC9AE67F5B5659A9D7D1FEFD187EE09FD4", 16), - hsh=sha1(b("test")).digest(), - hash_func=sha1, + hsh=hashlib.sha1(b"test").digest(), + hash_func=hashlib.sha1, expected=int( "D9CF9C3D3297D3260773A1DA7418DB5537AB8DD93DE7FA25", 16 ), @@ -1508,8 +2079,8 @@ def test_6(self): self._do( generator=NIST192p.generator, secexp=int("6FAB034934E4C0FC9AE67F5B5659A9D7D1FEFD187EE09FD4", 16), - hsh=sha256(b("test")).digest(), - hash_func=sha256, + hsh=hashlib.sha256(b"test").digest(), + hash_func=hashlib.sha256, expected=int( "5C4CE89CF56D9E7C77C8585339B006B97B5F0680B4306C6C", 16 ), @@ -1519,8 +2090,8 @@ def test_7(self): self._do( generator=NIST192p.generator, secexp=int("6FAB034934E4C0FC9AE67F5B5659A9D7D1FEFD187EE09FD4", 16), - hsh=sha512(b("test")).digest(), - hash_func=sha512, + hsh=hashlib.sha512(b"test").digest(), + hash_func=hashlib.sha512, expected=int( "0758753A5254759C7CFBAD2E2D9B0792EEE44136C9480527", 16 ), @@ -1533,8 +2104,8 @@ def test_8(self): "0FAD06DAA62BA3B25D2FB40133DA757205DE67F5BB0018FEE8C86E1B68C7E75CAA896EB32F1F47C70855836A6D16FCC1466F6D8FBEC67DB89EC0C08B0E996B83538", 16, ), - 
hsh=sha1(b("sample")).digest(), - hash_func=sha1, + hsh=hashlib.sha1(b"sample").digest(), + hash_func=hashlib.sha1, expected=int( "089C071B419E1C2820962321787258469511958E80582E95D8378E0C2CCDB3CB42BEDE42F50E3FA3C71F5A76724281D31D9C89F0F91FC1BE4918DB1C03A5838D0F9", 16, @@ -1548,8 +2119,8 @@ def test_9(self): "0FAD06DAA62BA3B25D2FB40133DA757205DE67F5BB0018FEE8C86E1B68C7E75CAA896EB32F1F47C70855836A6D16FCC1466F6D8FBEC67DB89EC0C08B0E996B83538", 16, ), - hsh=sha256(b("sample")).digest(), - hash_func=sha256, + hsh=hashlib.sha256(b"sample").digest(), + hash_func=hashlib.sha256, expected=int( "0EDF38AFCAAECAB4383358B34D67C9F2216C8382AAEA44A3DAD5FDC9C32575761793FEF24EB0FC276DFC4F6E3EC476752F043CF01415387470BCBD8678ED2C7E1A0", 16, @@ -1563,8 +2134,8 @@ def test_10(self): "0FAD06DAA62BA3B25D2FB40133DA757205DE67F5BB0018FEE8C86E1B68C7E75CAA896EB32F1F47C70855836A6D16FCC1466F6D8FBEC67DB89EC0C08B0E996B83538", 16, ), - hsh=sha512(b("test")).digest(), - hash_func=sha512, + hsh=hashlib.sha512(b"test").digest(), + hash_func=hashlib.sha512, expected=int( "16200813020EC986863BEDFC1B121F605C1215645018AEA1A7B215A564DE9EB1B38A67AA1128B80CE391C4FB71187654AAA3431027BFC7F395766CA988C964DC56D", 16, @@ -1667,6 +2238,7 @@ def test_brainpoolP256r1(self): ), ) + @pytest.mark.slow def test_brainpoolP384r1(self): self._do( curve=curve_brainpoolp384r1, @@ -1713,6 +2285,7 @@ def test_brainpoolP384r1(self): ), ) + @pytest.mark.slow def test_brainpoolP512r1(self): self._do( curve=curve_brainpoolp512r1, @@ -1817,6 +2390,7 @@ def test_brainpoolP256r1(self): ), ) + @pytest.mark.slow def test_brainpoolP384r1(self): self._do( curve=curve_brainpoolp384r1, @@ -1863,6 +2437,7 @@ def test_brainpoolP384r1(self): ), ) + @pytest.mark.slow def test_brainpoolP512r1(self): self._do( curve=curve_brainpoolp512r1, @@ -1928,7 +2503,7 @@ def test_brainpoolP512r1(self): "6FC98BD7E50211A4A27102FA3549DF79EBCB4BF246B80945CDDFE7D509BBFD7D", "9E56F509196784D963D1C0A401510EE7ADA3DCC5DEE04B154BF61AF1D5A6DECE", b"abc", - sha256, + hashlib.sha256, "CB28E0999B9C7715FD0A80D8E47A77079716CBBF917DD72E97566EA1C066957C", "86FA3BB4E26CAD5BF90B7F81899256CE7594BB1EA0C89212748BFF3B3D5B0315", NIST256p, @@ -1944,7 +2519,7 @@ def test_brainpoolP512r1(self): "B4B74E44D71A13D568003D7489908D564C7761E229C58CBFA18950096EB7463B" "854D7FA992F934D927376285E63414FA", b"abc", - sha384, + hashlib.sha384, "FB017B914E29149432D8BAC29A514640B46F53DDAB2C69948084E2930F1C8F7E" "08E07C9C63F2D21A07DCB56A6AF56EB3", "B263A1305E057F984D38726A1B46874109F417BCA112674C528262A40A629AF1" @@ -1966,7 +2541,7 @@ def test_brainpoolP512r1(self): "373778F9DE6B6497B1EF825FF24F42F9B4A4BD7382CFC3378A540B1B7F0C1B95" "6C2F", b"abc", - sha512, + hashlib.sha512, "0154FD3836AF92D0DCA57DD5341D3053988534FDE8318FC6AAAAB68E2E6F4339" "B19F2F281A7E0B22C269D93CF8794A9278880ED7DBB8D9362CAEACEE54432055" "2251", diff --git a/src/ecdsa/test_rw_lock.py b/src/ecdsa/test_rw_lock.py deleted file mode 100644 index d3604825..00000000 --- a/src/ecdsa/test_rw_lock.py +++ /dev/null @@ -1,177 +0,0 @@ -# Copyright Mateusz Kobos, (c) 2011 -# https://code.activestate.com/recipes/577803-reader-writer-lock-with-priority-for-writers/ -# released under the MIT licence - -import unittest -import threading -import time -import copy -from ._rwlock import RWLock - - -class Writer(threading.Thread): - def __init__( - self, buffer_, rw_lock, init_sleep_time, sleep_time, to_write - ): - """ - @param buffer_: common buffer_ shared by the readers and writers - @type buffer_: list - @type rw_lock: L{RWLock} - @param init_sleep_time: sleep time before 
doing any action - @type init_sleep_time: C{float} - @param sleep_time: sleep time while in critical section - @type sleep_time: C{float} - @param to_write: data that will be appended to the buffer - """ - threading.Thread.__init__(self) - self.__buffer = buffer_ - self.__rw_lock = rw_lock - self.__init_sleep_time = init_sleep_time - self.__sleep_time = sleep_time - self.__to_write = to_write - self.entry_time = None - """Time of entry to the critical section""" - self.exit_time = None - """Time of exit from the critical section""" - - def run(self): - time.sleep(self.__init_sleep_time) - self.__rw_lock.writer_acquire() - self.entry_time = time.time() - time.sleep(self.__sleep_time) - self.__buffer.append(self.__to_write) - self.exit_time = time.time() - self.__rw_lock.writer_release() - - -class Reader(threading.Thread): - def __init__(self, buffer_, rw_lock, init_sleep_time, sleep_time): - """ - @param buffer_: common buffer shared by the readers and writers - @type buffer_: list - @type rw_lock: L{RWLock} - @param init_sleep_time: sleep time before doing any action - @type init_sleep_time: C{float} - @param sleep_time: sleep time while in critical section - @type sleep_time: C{float} - """ - threading.Thread.__init__(self) - self.__buffer = buffer_ - self.__rw_lock = rw_lock - self.__init_sleep_time = init_sleep_time - self.__sleep_time = sleep_time - self.buffer_read = None - """a copy of a the buffer read while in critical section""" - self.entry_time = None - """Time of entry to the critical section""" - self.exit_time = None - """Time of exit from the critical section""" - - def run(self): - time.sleep(self.__init_sleep_time) - self.__rw_lock.reader_acquire() - self.entry_time = time.time() - time.sleep(self.__sleep_time) - self.buffer_read = copy.deepcopy(self.__buffer) - self.exit_time = time.time() - self.__rw_lock.reader_release() - - -class RWLockTestCase(unittest.TestCase): - def test_readers_nonexclusive_access(self): - (buffer_, rw_lock, threads) = self.__init_variables() - - threads.append(Reader(buffer_, rw_lock, 0, 0)) - threads.append(Writer(buffer_, rw_lock, 0.2, 0.4, 1)) - threads.append(Reader(buffer_, rw_lock, 0.3, 0.3)) - threads.append(Reader(buffer_, rw_lock, 0.5, 0)) - - self.__start_and_join_threads(threads) - - ## The third reader should enter after the second one but it should - ## exit before the second one exits - ## (i.e. 
the readers should be in the critical section - ## at the same time) - - self.assertEqual([], threads[0].buffer_read) - self.assertEqual([1], threads[2].buffer_read) - self.assertEqual([1], threads[3].buffer_read) - self.assertTrue(threads[1].exit_time <= threads[2].entry_time) - self.assertTrue(threads[2].entry_time <= threads[3].entry_time) - self.assertTrue(threads[3].exit_time < threads[2].exit_time) - - def test_writers_exclusive_access(self): - (buffer_, rw_lock, threads) = self.__init_variables() - - threads.append(Writer(buffer_, rw_lock, 0, 0.4, 1)) - threads.append(Writer(buffer_, rw_lock, 0.1, 0, 2)) - threads.append(Reader(buffer_, rw_lock, 0.2, 0)) - - self.__start_and_join_threads(threads) - - ## The second writer should wait for the first one to exit - - self.assertEqual([1, 2], threads[2].buffer_read) - self.assertTrue(threads[0].exit_time <= threads[1].entry_time) - self.assertTrue(threads[1].exit_time <= threads[2].exit_time) - - def test_writer_priority(self): - (buffer_, rw_lock, threads) = self.__init_variables() - - threads.append(Writer(buffer_, rw_lock, 0, 0, 1)) - threads.append(Reader(buffer_, rw_lock, 0.1, 0.4)) - threads.append(Writer(buffer_, rw_lock, 0.2, 0, 2)) - threads.append(Reader(buffer_, rw_lock, 0.3, 0)) - threads.append(Reader(buffer_, rw_lock, 0.3, 0)) - - self.__start_and_join_threads(threads) - - ## The second writer should go before the second and the third reader - - self.assertEqual([1], threads[1].buffer_read) - self.assertEqual([1, 2], threads[3].buffer_read) - self.assertEqual([1, 2], threads[4].buffer_read) - self.assertTrue(threads[0].exit_time < threads[1].entry_time) - self.assertTrue(threads[1].exit_time <= threads[2].entry_time) - self.assertTrue(threads[2].exit_time <= threads[3].entry_time) - self.assertTrue(threads[2].exit_time <= threads[4].entry_time) - - def test_many_writers_priority(self): - (buffer_, rw_lock, threads) = self.__init_variables() - - threads.append(Writer(buffer_, rw_lock, 0, 0, 1)) - threads.append(Reader(buffer_, rw_lock, 0.1, 0.6)) - threads.append(Writer(buffer_, rw_lock, 0.2, 0.1, 2)) - threads.append(Reader(buffer_, rw_lock, 0.3, 0)) - threads.append(Reader(buffer_, rw_lock, 0.4, 0)) - threads.append(Writer(buffer_, rw_lock, 0.5, 0.1, 3)) - - self.__start_and_join_threads(threads) - - ## The two last writers should go first -- after the first reader and - ## before the second and the third reader - - self.assertEqual([1], threads[1].buffer_read) - self.assertEqual([1, 2, 3], threads[3].buffer_read) - self.assertEqual([1, 2, 3], threads[4].buffer_read) - self.assertTrue(threads[0].exit_time < threads[1].entry_time) - self.assertTrue(threads[1].exit_time <= threads[2].entry_time) - self.assertTrue(threads[1].exit_time <= threads[5].entry_time) - self.assertTrue(threads[2].exit_time <= threads[3].entry_time) - self.assertTrue(threads[2].exit_time <= threads[4].entry_time) - self.assertTrue(threads[5].exit_time <= threads[3].entry_time) - self.assertTrue(threads[5].exit_time <= threads[4].entry_time) - - @staticmethod - def __init_variables(): - buffer_ = [] - rw_lock = RWLock() - threads = [] - return (buffer_, rw_lock, threads) - - @staticmethod - def __start_and_join_threads(threads): - for t in threads: - t.start() - for t in threads: - t.join() diff --git a/src/ecdsa/test_sha3.py b/src/ecdsa/test_sha3.py new file mode 100644 index 00000000..d30381d7 --- /dev/null +++ b/src/ecdsa/test_sha3.py @@ -0,0 +1,111 @@ +try: + import unittest2 as unittest +except ImportError: + import unittest +import pytest + +try: 
+ from gmpy2 import mpz + + GMPY = True +except ImportError: # pragma: no cover + try: + from gmpy import mpz + + GMPY = True + except ImportError: + GMPY = False + +from ._sha3 import shake_256 +from ._compat import bytes_to_int, int_to_bytes + +B2I_VECTORS = [ + (b"\x00\x01", "big", 1), + (b"\x00\x01", "little", 0x0100), + (b"", "big", 0), + (b"\x00", "little", 0), +] + + +@pytest.mark.parametrize("bytes_in,endian,int_out", B2I_VECTORS) +def test_bytes_to_int(bytes_in, endian, int_out): + out = bytes_to_int(bytes_in, endian) + assert out == int_out + + +class TestBytesToInt(unittest.TestCase): + def test_bytes_to_int_wrong_endian(self): + with self.assertRaises(ValueError): + bytes_to_int(b"\x00", "middle") + + def test_int_to_bytes_wrong_endian(self): + with self.assertRaises(ValueError): + int_to_bytes(0, byteorder="middle") + + +@pytest.mark.skipif(GMPY == False, reason="requires gmpy or gmpy2") +def test_int_to_bytes_with_gmpy(): + assert int_to_bytes(mpz(1)) == b"\x01" + + +I2B_VECTORS = [ + (0, None, "big", b""), + (0, 1, "big", b"\x00"), + (1, None, "big", b"\x01"), + (0x0100, None, "little", b"\x00\x01"), + (0x0100, 4, "little", b"\x00\x01\x00\x00"), + (1, 4, "big", b"\x00\x00\x00\x01"), +] + + +@pytest.mark.parametrize("int_in,length,endian,bytes_out", I2B_VECTORS) +def test_int_to_bytes(int_in, length, endian, bytes_out): + out = int_to_bytes(int_in, length, endian) + assert out == bytes_out + + +SHAKE_256_VECTORS = [ + ( + b"Message.", + 32, + b"\x78\xa1\x37\xbb\x33\xae\xe2\x72\xb1\x02\x4f\x39\x43\xe5\xcf\x0c" + b"\x4e\x9c\x72\x76\x2e\x34\x4c\xf8\xf9\xc3\x25\x9d\x4f\x91\x2c\x3a", + ), + ( + b"", + 32, + b"\x46\xb9\xdd\x2b\x0b\xa8\x8d\x13\x23\x3b\x3f\xeb\x74\x3e\xeb\x24" + b"\x3f\xcd\x52\xea\x62\xb8\x1b\x82\xb5\x0c\x27\x64\x6e\xd5\x76\x2f", + ), + ( + b"message", + 32, + b"\x86\x16\xe1\xe4\xcf\xd8\xb5\xf7\xd9\x2d\x43\xd8\x6e\x1b\x14\x51" + b"\xa2\xa6\x5a\xf8\x64\xfc\xb1\x26\xc2\x66\x0a\xb3\x46\x51\xb1\x75", + ), + ( + b"message", + 16, + b"\x86\x16\xe1\xe4\xcf\xd8\xb5\xf7\xd9\x2d\x43\xd8\x6e\x1b\x14\x51", + ), + ( + b"message", + 64, + b"\x86\x16\xe1\xe4\xcf\xd8\xb5\xf7\xd9\x2d\x43\xd8\x6e\x1b\x14\x51" + b"\xa2\xa6\x5a\xf8\x64\xfc\xb1\x26\xc2\x66\x0a\xb3\x46\x51\xb1\x75" + b"\x30\xd6\xba\x2a\x46\x65\xf1\x9d\xf0\x62\x25\xb1\x26\xd1\x3e\xed" + b"\x91\xd5\x0d\xe7\xb9\xcb\x65\xf3\x3a\x46\xae\xd3\x6c\x7d\xc5\xe8", + ), + ( + b"A" * 1024, + 32, + b"\xa5\xef\x7e\x30\x8b\xe8\x33\x64\xe5\x9c\xf3\xb5\xf3\xba\x20\xa3" + b"\x5a\xe7\x30\xfd\xbc\x33\x11\xbf\x83\x89\x50\x82\xb4\x41\xe9\xb3", + ), +] + + +@pytest.mark.parametrize("msg,olen,ohash", SHAKE_256_VECTORS) +def test_shake_256(msg, olen, ohash): + out = shake_256(msg, olen) + assert out == bytearray(ohash) diff --git a/src/ecdsa/util.py b/src/ecdsa/util.py index e77d61c6..1aff5bf5 100644 --- a/src/ecdsa/util.py +++ b/src/ecdsa/util.py @@ -1,3 +1,16 @@ +""" +This module includes some utility functions. + +The methods most typically used are the sigencode and sigdecode functions +to be used with :func:`~ecdsa.keys.SigningKey.sign` and +:func:`~ecdsa.keys.VerifyingKey.verify` +respectively. See the :func:`sigencode_strings`, :func:`sigdecode_string`, +:func:`sigencode_der`, :func:`sigencode_strings_canonize`, +:func:`sigencode_string_canonize`, :func:`sigencode_der_canonize`, +:func:`sigdecode_strings`, :func:`sigdecode_string`, and +:func:`sigdecode_der` functions. 
+""" + from __future__ import division import os @@ -5,10 +18,11 @@ import binascii import sys from hashlib import sha256 -from six import PY2, int2byte, b, next +from six import PY2, int2byte, next from . import der from ._compat import normalise_bytes + # RFC5480: # The "unrestricted" algorithm identifier is: # id-ecPublicKey OBJECT IDENTIFIER ::= { @@ -33,13 +47,12 @@ oid_ecMQV = (1, 3, 132, 1, 13) -if sys.version_info >= (3,): +if sys.version_info >= (3,): # pragma: no branch def entropy_to_bits(ent_256): """Convert a bytestring to string of 0's and 1's""" return bin(int.from_bytes(ent_256, "big"))[2:].zfill(len(ent_256) * 8) - else: def entropy_to_bits(ent_256): @@ -47,12 +60,11 @@ def entropy_to_bits(ent_256): return "".join(bin(ord(x))[2:].zfill(8) for x in ent_256) -if sys.version_info < (2, 7): +if sys.version_info < (2, 7): # pragma: no branch # Can't add a method to a built-in type so we are stuck with this def bit_length(x): return len(bin(x)) - 2 - else: def bit_length(x): @@ -99,7 +111,7 @@ def __init__(self, seed): def __call__(self, numbytes): a = [next(self.generator) for i in range(numbytes)] - if PY2: + if PY2: # pragma: no branch return "".join(a) else: return bytes(a) @@ -190,7 +202,7 @@ def randrange_from_seed__trytryagain(seed, order): bits, bytes, extrabits = bits_and_bytes(order) generate = PRNG(seed) while True: - extrabyte = b("") + extrabyte = b"" if extrabits: extrabyte = int2byte(ord(generate(1)) & lsb_of_ones(extrabits)) guess = string_to_number(extrabyte + generate(bytes)) + 1 @@ -223,12 +235,24 @@ def string_to_number_fixedlen(string, order): return int(binascii.hexlify(string), 16) -# these methods are useful for the sigencode= argument to SK.sign() and the -# sigdecode= argument to VK.verify(), and control how the signature is packed -# or unpacked. +def sigencode_strings(r, s, order): + """ + Encode the signature to a pair of strings in a tuple + + Encodes signature into raw encoding (:term:`raw encoding`) with the + ``r`` and ``s`` parts of the signature encoded separately. + It's expected that this function will be used as a ``sigencode=`` parameter + in :func:`ecdsa.keys.SigningKey.sign` method. -def sigencode_strings(r, s, order): + :param int r: first parameter of the signature + :param int s: second parameter of the signature + :param int order: the order of the curve over which the signature was + computed + + :return: raw encoding of ECDSA signature + :rtype: tuple(bytes, bytes) + """ r_str = number_to_string(r, order) s_str = number_to_string(s, order) return (r_str, s_str) @@ -238,7 +262,7 @@ def sigencode_string(r, s, order): """ Encode the signature to raw format (:term:`raw encoding`) - It's expected that this function will be used as a `sigencode=` parameter + It's expected that this function will be used as a ``sigencode=`` parameter in :func:`ecdsa.keys.SigningKey.sign` method. :param int r: first parameter of the signature @@ -266,7 +290,7 @@ def sigencode_der(r, s, order): s INTEGER } - It's expected that this function will be used as a `sigencode=` parameter + It's expected that this function will be used as a ``sigencode=`` parameter in :func:`ecdsa.keys.SigningKey.sign` method. 
:param int r: first parameter of the signature @@ -280,25 +304,99 @@ def sigencode_der(r, s, order): return der.encode_sequence(der.encode_integer(r), der.encode_integer(s)) -# canonical versions of sigencode methods -# these enforce low S values, by negating the value (modulo the order) if -# above order/2 see CECKey::Sign() -# https://github.com/bitcoin/bitcoin/blob/master/src/key.cpp#L214 -def sigencode_strings_canonize(r, s, order): - if s > order / 2: +def _canonize(s, order): + """ + Internal function for ensuring that the ``s`` value of a signature is in + the "canonical" format. + + :param int s: the second parameter of ECDSA signature + :param int order: the order of the curve over which the signatures was + computed + + :return: canonical value of s + :rtype: int + """ + if s > order // 2: s = order - s + return s + + +def sigencode_strings_canonize(r, s, order): + """ + Encode the signature to a pair of strings in a tuple + + Encodes signature into raw encoding (:term:`raw encoding`) with the + ``r`` and ``s`` parts of the signature encoded separately. + + Makes sure that the signature is encoded in the canonical format, where + the ``s`` parameter is always smaller than ``order / 2``. + Most commonly used in bitcoin. + + It's expected that this function will be used as a ``sigencode=`` parameter + in :func:`ecdsa.keys.SigningKey.sign` method. + + :param int r: first parameter of the signature + :param int s: second parameter of the signature + :param int order: the order of the curve over which the signature was + computed + + :return: raw encoding of ECDSA signature + :rtype: tuple(bytes, bytes) + """ + s = _canonize(s, order) return sigencode_strings(r, s, order) def sigencode_string_canonize(r, s, order): - if s > order / 2: - s = order - s + """ + Encode the signature to raw format (:term:`raw encoding`) + + Makes sure that the signature is encoded in the canonical format, where + the ``s`` parameter is always smaller than ``order / 2``. + Most commonly used in bitcoin. + + It's expected that this function will be used as a ``sigencode=`` parameter + in :func:`ecdsa.keys.SigningKey.sign` method. + + :param int r: first parameter of the signature + :param int s: second parameter of the signature + :param int order: the order of the curve over which the signature was + computed + + :return: raw encoding of ECDSA signature + :rtype: bytes + """ + s = _canonize(s, order) return sigencode_string(r, s, order) def sigencode_der_canonize(r, s, order): - if s > order / 2: - s = order - s + """ + Encode the signature into the ECDSA-Sig-Value structure using :term:`DER`. + + Makes sure that the signature is encoded in the canonical format, where + the ``s`` parameter is always smaller than ``order / 2``. + Most commonly used in bitcoin. + + Encodes the signature to the following :term:`ASN.1` structure:: + + Ecdsa-Sig-Value ::= SEQUENCE { + r INTEGER, + s INTEGER + } + + It's expected that this function will be used as a ``sigencode=`` parameter + in :func:`ecdsa.keys.SigningKey.sign` method. + + :param int r: first parameter of the signature + :param int s: second parameter of the signature + :param int order: the order of the curve over which the signature was + computed + + :return: DER encoding of ECDSA signature + :rtype: bytes + """ + s = _canonize(s, order) return sigencode_der(r, s, order) @@ -323,7 +421,7 @@ def sigdecode_string(signature, order): the signature, with each encoded using the same amount of bytes depending on curve size/order. 
- It's expected that this function will be used as the `sigdecode=` + It's expected that this function will be used as the ``sigdecode=`` parameter to the :func:`ecdsa.keys.VerifyingKey.verify` method. :param signature: encoded signature @@ -333,7 +431,7 @@ def sigdecode_string(signature, order): :raises MalformedSignature: when the encoding of the signature is invalid - :return: tuple with decoded 'r' and 's' values of signature + :return: tuple with decoded ``r`` and ``s`` values of signature :rtype: tuple of ints """ signature = normalise_bytes(signature) @@ -352,10 +450,10 @@ def sigdecode_strings(rs_strings, order): """ Decode the signature from two strings. - First string needs to be a big endian encoding of 'r', second needs to - be a big endian encoding of the 's' parameter of an ECDSA signature. + First string needs to be a big endian encoding of ``r``, second needs to + be a big endian encoding of the ``s`` parameter of an ECDSA signature. - It's expected that this function will be used as the `sigdecode=` + It's expected that this function will be used as the ``sigdecode=`` parameter to the :func:`ecdsa.keys.VerifyingKey.verify` method. :param list rs_strings: list of two bytes-like objects, each encoding one @@ -364,7 +462,7 @@ def sigdecode_strings(rs_strings, order): :raises MalformedSignature: when the encoding of the signature is invalid - :return: tuple with decoded 'r' and 's' values of signature + :return: tuple with decoded ``r`` and ``s`` values of signature :rtype: tuple of ints """ if not len(rs_strings) == 2: @@ -406,7 +504,7 @@ def sigdecode_der(sig_der, order): s INTEGER } - It's expected that this function will be used as as the `sigdecode=` + It's expected that this function will be used as as the ``sigdecode=`` parameter to the :func:`ecdsa.keys.VerifyingKey.verify` method. 
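# --- Editor's note: illustrative sketch, not part of the patch. ----------
# Intended use of the encoders/decoders documented above: pass them as the
# sigencode=/sigdecode= arguments of SigningKey.sign() and
# VerifyingKey.verify().  The *_canonize variants keep s in the low half of
# the range (if s > order // 2 it is replaced by order - s, see _canonize()).
from ecdsa import SigningKey, NIST256p
from ecdsa.util import sigencode_der_canonize, sigdecode_der

sk = SigningKey.generate(curve=NIST256p)
vk = sk.get_verifying_key()
signature = sk.sign(b"message", sigencode=sigencode_der_canonize)
assert vk.verify(signature, b"message", sigdecode=sigdecode_der)
# --------------------------------------------------------------------------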
:param sig_der: encoded signature @@ -416,7 +514,7 @@ def sigdecode_der(sig_der, order): :raises UnexpectedDER: when the encoding of signature is invalid - :return: tuple with decoded 'r' and 's' values of signature + :return: tuple with decoded ``r`` and ``s`` values of signature :rtype: tuple of ints """ sig_der = normalise_bytes(sig_der) diff --git a/tox.ini b/tox.ini index 0a484d2a..19c03175 100644 --- a/tox.ini +++ b/tox.ini @@ -1,21 +1,17 @@ [tox] -envlist = py26, py27, py33, py34, py35, py36, py37, py38, py, pypy, pypy3, gmpy2py27, gmpy2py38, gmpypy27, gmpypy38 +envlist = py26, py27, py35, py36, py37, py38, py39, py310, py311, py312, py313, py, pypy, pypy3, gmpy2py27, gmpy2py39, gmpy2py310, gmpypy27, gmpypy39, gmpypy310, codechecks [testenv] deps = - py{33}: py<1.5 - py{33}: pytest<3.3 - py{33}: enum34 - py{33}: hypothesis<3.44 py{26}: unittest2 py{26}: hypothesis<3 - py{26,27,34,35,36,37,38,py,py3}: pytest - py{27,34,35,36,37,38,py,py3}: hypothesis - gmpy2py{27,38}: gmpy2 - gmpypy{27,38}: gmpy - gmpy{2py27,2py38,py27,py38}: pytest - gmpy{2py27,2py38,py27,py38}: hypothesis + py{26,27,35,36,37,38,39,310,311,312,313,py,py3}: pytest + py{27,35,36,37,38,39,310,311,312,313,py,py3}: hypothesis + gmpy2py{27,39,310,311,312,313}: gmpy2 + gmpypy{27,39,310,311,312,313}: gmpy + gmpy{2py27,2py39,2py310,2py311,2py312,2py313,py27,py39,py310,py311,py312,py313}: pytest + gmpy{2py27,2py39,2py310,2py311,2py312,2py313,py27,py39,py310,py311,py312,py313}: hypothesis # six==1.9.0 comes from setup.py install_requires py27_old_six: six==1.9.0 py27_old_six: pytest @@ -30,8 +26,7 @@ deps = py27_old_gmpy2: hypothesis py: pytest py: hypothesis - py{33}: wheel<0.30 - coverage==4.5.4 + coverage commands = coverage run --branch -m pytest {posargs:src/ecdsa} [testenv:py27_old_gmpy] @@ -46,19 +41,59 @@ basepython = python2.7 [testenv:gmpypy27] basepython=python2.7 -[testenv:gmpypy38] -basepython=python3.8 +[testenv:gmpypy39] +basepython=python3.9 + +[testenv:gmpypy310] +basepython=python3.10 + +[testenv:gmpypy311] +basepython=python3.11 + +[testenv:gmpypy312] +basepython=python3.12 + +[testenv:gmpypy313] +basepython=python3.13 [testenv:gmpy2py27] basepython=python2.7 -[testenv:gmpy2py38] -basepython=python3.8 +[testenv:gmpy2py39] +basepython=python3.9 + +[testenv:gmpy2py310] +basepython=python3.10 + +[testenv:gmpy2py311] +basepython=python3.11 + +[testenv:gmpy2py312] +basepython=python3.12 + +[testenv:gmpy2py313] +basepython=python3.13 + +[testenv:instrumental] +basepython = python2.7 +deps = + gmpy2 + instrumental + hypothesis + pytest>=4.6.0 + coverage + six +commands = + instrumental -t ecdsa -i '.*test_.*|.*_version|.*_compat|.*_sha3' {envbindir}/pytest {posargs:src/ecdsa} + instrumental -f .instrumental.cov -sr [testenv:coverage] sitepackages=True whitelist_externals=coverage -commands = coverage run --branch -m pytest --hypothesis-show-statistics {posargs:src/ecdsa} +commands = + coverage run --branch -m pytest --hypothesis-show-statistics {posargs:src/ecdsa} + coverage html + coverage report -m [testenv:speed] commands = {envpython} speed.py @@ -74,12 +109,19 @@ commands = {envpython} speed.py [testenv:codechecks] basepython = python3 deps = - black==19.10b0 - flake8 + black==22.3.0 + flake8==6.1.0 commands = flake8 setup.py speed.py src black --check --line-length 79 . +[testenv:codeformat] +basepython = python3 +deps = + black==22.3.0 +commands = + black --line-length 79 . + [flake8] exclude = src/ecdsa/test*.py # We're just getting started. 
For now, ignore the following problems: diff --git a/versioneer.py b/versioneer.py index 0e49e951..81817f1f 100644 --- a/versioneer.py +++ b/versioneer.py @@ -1,4 +1,4 @@ -# Version: 0.17 +# Version: 0.21 """The Versioneer - like a rocketeer, but for versions. @@ -6,16 +6,12 @@ ============== * like a rocketeer, but for versions! -* https://github.com/warner/python-versioneer +* https://github.com/python-versioneer/python-versioneer * Brian Warner * License: Public Domain -* Compatible With: python2.6, 2.7, 3.2, 3.3, 3.4, 3.5, and pypy -* [![Latest Version] -(https://pypip.in/version/versioneer/badge.svg?style=flat) -](https://pypi.python.org/pypi/versioneer/) -* [![Build Status] -(https://travis-ci.org/warner/python-versioneer.png?branch=master) -](https://travis-ci.org/warner/python-versioneer) +* Compatible with: Python 3.6, 3.7, 3.8, 3.9 and pypy3 +* [![Latest Version][pypi-image]][pypi-url] +* [![Build Status][travis-image]][travis-url] This is a tool for managing a recorded version number in distutils-based python projects. The goal is to remove the tedious and error-prone "update @@ -26,9 +22,10 @@ ## Quick Install -* `pip install versioneer` to somewhere to your $PATH -* add a `[versioneer]` section to your setup.cfg (see below) +* `pip install versioneer` to somewhere in your $PATH +* add a `[versioneer]` section to your setup.cfg (see [Install](INSTALL.md)) * run `versioneer install` in your source tree, commit the results +* Verify version information with `python setup.py version` ## Version Identifiers @@ -60,7 +57,7 @@ for example `git describe --tags --dirty --always` reports things like "0.7-1-g574ab98-dirty" to indicate that the checkout is one revision past the 0.7 tag, has a unique revision id of "574ab98", and is "dirty" (it has -uncommitted changes. +uncommitted changes). The version identifier is used for multiple purposes: @@ -150,8 +147,8 @@ software (exactly equal to a known tag), the identifier will only contain the stripped tag, e.g. "0.11". -Other styles are available. See details.md in the Versioneer source tree for -descriptions. +Other styles are available. See [details.md](details.md) in the Versioneer +source tree for descriptions. ## Debugging @@ -165,7 +162,7 @@ Some situations are known to cause problems for Versioneer. This details the most significant ones. More can be found on Github -[issues page](https://github.com/warner/python-versioneer/issues). +[issues page](https://github.com/python-versioneer/python-versioneer/issues). ### Subprojects @@ -179,7 +176,7 @@ `setup.cfg`, and `tox.ini`. Projects like these produce multiple PyPI distributions (and upload multiple independently-installable tarballs). * Source trees whose main purpose is to contain a C library, but which also - provide bindings to Python (and perhaps other langauges) in subdirectories. + provide bindings to Python (and perhaps other languages) in subdirectories. Versioneer will look for `.git` in parent directories, and most operations should get the right version string. However `pip` and `setuptools` have bugs @@ -193,9 +190,9 @@ Pip-8.1.1 is known to have this problem, but hopefully it will get fixed in some later version. -[Bug #38](https://github.com/warner/python-versioneer/issues/38) is tracking +[Bug #38](https://github.com/python-versioneer/python-versioneer/issues/38) is tracking this issue. 
The discussion in -[PR #61](https://github.com/warner/python-versioneer/pull/61) describes the +[PR #61](https://github.com/python-versioneer/python-versioneer/pull/61) describes the issue from the Versioneer side in more detail. [pip PR#3176](https://github.com/pypa/pip/pull/3176) and [pip PR#3615](https://github.com/pypa/pip/pull/3615) contain work to improve @@ -223,22 +220,10 @@ cause egg_info to be rebuilt (including `sdist`, `wheel`, and installing into a different virtualenv), so this can be surprising. -[Bug #83](https://github.com/warner/python-versioneer/issues/83) describes +[Bug #83](https://github.com/python-versioneer/python-versioneer/issues/83) describes this one, but upgrading to a newer version of setuptools should probably resolve it. -### Unicode version strings - -While Versioneer works (and is continually tested) with both Python 2 and -Python 3, it is not entirely consistent with bytes-vs-unicode distinctions. -Newer releases probably generate unicode version strings on py2. It's not -clear that this is wrong, but it may be surprising for applications when then -write these strings to a network connection or include them in bytes-oriented -APIs like cryptographic checksums. - -[Bug #71](https://github.com/warner/python-versioneer/issues/71) investigates -this question. - ## Updating Versioneer @@ -264,6 +249,14 @@ direction and include code from all supported VCS systems, reducing the number of intermediate scripts. +## Similar projects + +* [setuptools_scm](https://github.com/pypa/setuptools_scm/) - a non-vendored build-time + dependency +* [minver](https://github.com/jbweston/miniver) - a lightweight reimplementation of + versioneer +* [versioningit](https://github.com/jwodder/versioningit) - a PEP 518-based setuptools + plugin ## License @@ -273,7 +266,18 @@ Dedication" license (CC0-1.0), as described in https://creativecommons.org/publicdomain/zero/1.0/ . +[pypi-image]: https://img.shields.io/pypi/v/versioneer.svg +[pypi-url]: https://pypi.python.org/pypi/versioneer/ +[travis-image]: +https://img.shields.io/travis/com/python-versioneer/python-versioneer.svg +[travis-url]: https://travis-ci.com/github/python-versioneer/python-versioneer + """ +# pylint:disable=invalid-name,import-outside-toplevel,missing-function-docstring +# pylint:disable=missing-class-docstring,too-many-branches,too-many-statements +# pylint:disable=raise-missing-from,too-many-lines,too-many-locals,import-error +# pylint:disable=too-few-public-methods,redefined-outer-name,consider-using-with +# pylint:disable=attribute-defined-outside-init,too-many-arguments from __future__ import print_function @@ -323,13 +327,13 @@ def get_root(): # module-import table will cache the first one. So we can't use # os.path.dirname(__file__), as that will find whichever # versioneer.py was first imported, even in later projects. 
- me = os.path.realpath(os.path.abspath(__file__)) - me_dir = os.path.normcase(os.path.splitext(me)[0]) + my_path = os.path.realpath(os.path.abspath(__file__)) + me_dir = os.path.normcase(os.path.splitext(my_path)[0]) vsr_dir = os.path.normcase(os.path.splitext(versioneer_py)[0]) if me_dir != vsr_dir: print( "Warning: build in %s is using versioneer.py from %s" - % (os.path.dirname(me), versioneer_py) + % (os.path.dirname(my_path), versioneer_py) ) except NameError: pass @@ -338,14 +342,17 @@ def get_root(): def get_config_from_root(root): """Read the project setup.cfg file to determine Versioneer config.""" - # This might raise EnvironmentError (if setup.cfg is missing), or + # This might raise OSError (if setup.cfg is missing), or # configparser.NoSectionError (if it lacks a [versioneer] section), or # configparser.NoOptionError (if it lacks "VCS="). See the docstring at # the top of versioneer.py for instructions on writing your setup.cfg . setup_cfg = os.path.join(root, "setup.cfg") - parser = configparser.SafeConfigParser() - with open(setup_cfg, "r") as f: - parser.readfp(f) + parser = configparser.ConfigParser() + with open(setup_cfg, "r") as cfg_file: + if sys.version_info < (3, 0): + parser.readfp(cfg_file) + else: + parser.read_file(cfg_file) VCS = parser.get("versioneer", "VCS") # mandatory def get(parser, name): @@ -355,14 +362,25 @@ def get(parser, name): cfg = VersioneerConfig() cfg.VCS = VCS - cfg.style = get(parser, "style") or "" - cfg.versionfile_source = get(parser, "versionfile_source") - cfg.versionfile_build = get(parser, "versionfile_build") - cfg.tag_prefix = get(parser, "tag_prefix") + if sys.version_info < (3, 0): + cfg.style = get(parser, "style") or "" + cfg.versionfile_source = get(parser, "versionfile_source") + cfg.versionfile_build = get(parser, "versionfile_build") + cfg.tag_prefix = get(parser, "tag_prefix") + cfg.parentdir_prefix = get(parser, "parentdir_prefix") + cfg.verbose = get(parser, "verbose") + else: + # Dict-like interface for non-mandatory entries + section = parser["versioneer"] + + cfg.style = section.get("style", "") + cfg.versionfile_source = section.get("versionfile_source") + cfg.versionfile_build = section.get("versionfile_build") + cfg.tag_prefix = section.get("tag_prefix") + cfg.parentdir_prefix = section.get("parentdir_prefix") + cfg.verbose = section.get("verbose") if cfg.tag_prefix in ("''", '""'): cfg.tag_prefix = "" - cfg.parentdir_prefix = get(parser, "parentdir_prefix") - cfg.verbose = get(parser, "verbose") return cfg @@ -376,13 +394,11 @@ class NotThisMethod(Exception): def register_vcs_handler(vcs, method): # decorator - """Decorator to mark a method as the handler for a particular VCS.""" + """Create decorator to mark a method as the handler of a VCS.""" def decorate(f): """Store f in HANDLERS[vcs][method].""" - if vcs not in HANDLERS: - HANDLERS[vcs] = {} - HANDLERS[vcs][method] = f + HANDLERS.setdefault(vcs, {})[method] = f return f return decorate @@ -393,20 +409,20 @@ def run_command( ): """Call the given command(s).""" assert isinstance(commands, list) - p = None - for c in commands: + process = None + for command in commands: try: - dispcmd = str([c] + args) + dispcmd = str([command] + args) # remember shell=False, so use git.cmd on windows, not just git - p = subprocess.Popen( - [c] + args, + process = subprocess.Popen( + [command] + args, cwd=cwd, env=env, stdout=subprocess.PIPE, stderr=(subprocess.PIPE if hide_stderr else None), ) break - except EnvironmentError: + except OSError: e = sys.exc_info()[1] if e.errno == 
errno.ENOENT: continue @@ -418,20 +434,18 @@ def run_command( if verbose: print("unable to find command, tried %s" % (commands,)) return None, None - stdout = p.communicate()[0].strip() - if sys.version_info[0] >= 3: - stdout = stdout.decode() - if p.returncode != 0: + stdout = process.communicate()[0].strip().decode() + if process.returncode != 0: if verbose: print("unable to run %s (error)" % dispcmd) print("stdout was %s" % stdout) - return None, p.returncode - return stdout, p.returncode + return None, process.returncode + return stdout, process.returncode LONG_VERSION_PY[ "git" -] = ''' +] = r''' # This file helps to compute a version number in source trees obtained from # git-archive tarball (such as those provided by githubs download-from-tag # feature). Distribution tarballs (built by setup.py sdist) and build @@ -439,7 +453,7 @@ def run_command( # that just contains the computed version number. # This file is released into the public domain. Generated by -# versioneer-0.17 (https://github.com/warner/python-versioneer) +# versioneer-0.21 (https://github.com/python-versioneer/python-versioneer) """Git implementation of _version.py.""" @@ -448,6 +462,7 @@ def run_command( import re import subprocess import sys +from typing import Callable, Dict def get_keywords(): @@ -485,12 +500,12 @@ class NotThisMethod(Exception): """Exception raised if a method is not valid for the current scenario.""" -LONG_VERSION_PY = {} -HANDLERS = {} +LONG_VERSION_PY: Dict[str, str] = {} +HANDLERS: Dict[str, Dict[str, Callable]] = {} def register_vcs_handler(vcs, method): # decorator - """Decorator to mark a method as the handler for a particular VCS.""" + """Create decorator to mark a method as the handler of a VCS.""" def decorate(f): """Store f in HANDLERS[vcs][method].""" if vcs not in HANDLERS: @@ -504,17 +519,17 @@ def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, env=None): """Call the given command(s).""" assert isinstance(commands, list) - p = None - for c in commands: + process = None + for command in commands: try: - dispcmd = str([c] + args) + dispcmd = str([command] + args) # remember shell=False, so use git.cmd on windows, not just git - p = subprocess.Popen([c] + args, cwd=cwd, env=env, - stdout=subprocess.PIPE, - stderr=(subprocess.PIPE if hide_stderr - else None)) + process = subprocess.Popen([command] + args, cwd=cwd, env=env, + stdout=subprocess.PIPE, + stderr=(subprocess.PIPE if hide_stderr + else None)) break - except EnvironmentError: + except OSError: e = sys.exc_info()[1] if e.errno == errno.ENOENT: continue @@ -526,15 +541,13 @@ def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, if verbose: print("unable to find command, tried %%s" %% (commands,)) return None, None - stdout = p.communicate()[0].strip() - if sys.version_info[0] >= 3: - stdout = stdout.decode() - if p.returncode != 0: + stdout = process.communicate()[0].strip().decode() + if process.returncode != 0: if verbose: print("unable to run %%s (error)" %% dispcmd) print("stdout was %%s" %% stdout) - return None, p.returncode - return stdout, p.returncode + return None, process.returncode + return stdout, process.returncode def versions_from_parentdir(parentdir_prefix, root, verbose): @@ -546,15 +559,14 @@ def versions_from_parentdir(parentdir_prefix, root, verbose): """ rootdirs = [] - for i in range(3): + for _ in range(3): dirname = os.path.basename(root) if dirname.startswith(parentdir_prefix): return {"version": dirname[len(parentdir_prefix):], "full-revisionid": None, 
"dirty": False, "error": None, "date": None} - else: - rootdirs.append(root) - root = os.path.dirname(root) # up a level + rootdirs.append(root) + root = os.path.dirname(root) # up a level if verbose: print("Tried directories %%s but none started with prefix %%s" %% @@ -571,22 +583,21 @@ def git_get_keywords(versionfile_abs): # _version.py. keywords = {} try: - f = open(versionfile_abs, "r") - for line in f.readlines(): - if line.strip().startswith("git_refnames ="): - mo = re.search(r'=\s*"(.*)"', line) - if mo: - keywords["refnames"] = mo.group(1) - if line.strip().startswith("git_full ="): - mo = re.search(r'=\s*"(.*)"', line) - if mo: - keywords["full"] = mo.group(1) - if line.strip().startswith("git_date ="): - mo = re.search(r'=\s*"(.*)"', line) - if mo: - keywords["date"] = mo.group(1) - f.close() - except EnvironmentError: + with open(versionfile_abs, "r") as fobj: + for line in fobj: + if line.strip().startswith("git_refnames ="): + mo = re.search(r'=\s*"(.*)"', line) + if mo: + keywords["refnames"] = mo.group(1) + if line.strip().startswith("git_full ="): + mo = re.search(r'=\s*"(.*)"', line) + if mo: + keywords["full"] = mo.group(1) + if line.strip().startswith("git_date ="): + mo = re.search(r'=\s*"(.*)"', line) + if mo: + keywords["date"] = mo.group(1) + except OSError: pass return keywords @@ -594,10 +605,14 @@ def git_get_keywords(versionfile_abs): @register_vcs_handler("git", "keywords") def git_versions_from_keywords(keywords, tag_prefix, verbose): """Get version information from git keywords.""" - if not keywords: - raise NotThisMethod("no keywords at all, weird") + if "refnames" not in keywords: + raise NotThisMethod("Short version file found") date = keywords.get("date") if date is not None: + # Use only the last line. Previous lines may contain GPG signature + # information. + date = date.splitlines()[-1] + # git-2.2.0 added "%%cI", which expands to an ISO-8601 -compliant # datestamp. However we prefer "%%ci" (which expands to an "ISO-8601 # -like" string, which we must then edit to make compliant), because @@ -610,11 +625,11 @@ def git_versions_from_keywords(keywords, tag_prefix, verbose): if verbose: print("keywords are unexpanded, not using") raise NotThisMethod("unexpanded keywords, not a git-archive tarball") - refs = set([r.strip() for r in refnames.strip("()").split(",")]) + refs = {r.strip() for r in refnames.strip("()").split(",")} # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of # just "foo-1.0". If we see a "tag: " prefix, prefer those. TAG = "tag: " - tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)]) + tags = {r[len(TAG):] for r in refs if r.startswith(TAG)} if not tags: # Either we're using git < 1.8.3, or there really are no tags. We use # a heuristic: assume all version tags have a digit. The old git %%d @@ -623,7 +638,7 @@ def git_versions_from_keywords(keywords, tag_prefix, verbose): # between branches and tags. By ignoring refnames without digits, we # filter out many common branch names like "release" and # "stabilization", as well as "HEAD" and "master". - tags = set([r for r in refs if re.search(r'\d', r)]) + tags = {r for r in refs if re.search(r'\d', r)} if verbose: print("discarding '%%s', no digits" %% ",".join(refs - tags)) if verbose: @@ -632,6 +647,11 @@ def git_versions_from_keywords(keywords, tag_prefix, verbose): # sorting will prefer e.g. 
"2.0" over "2.0rc1" if ref.startswith(tag_prefix): r = ref[len(tag_prefix):] + # Filter out refs that exactly match prefix or that don't start + # with a number once the prefix is stripped (mostly a concern + # when prefix is '') + if not re.match(r'\d', r): + continue if verbose: print("picking %%s" %% r) return {"version": r, @@ -647,7 +667,7 @@ def git_versions_from_keywords(keywords, tag_prefix, verbose): @register_vcs_handler("git", "pieces_from_vcs") -def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): +def git_pieces_from_vcs(tag_prefix, root, verbose, runner=run_command): """Get version from 'git describe' in the root of the source tree. This only gets called if the git-archive 'subst' keywords were *not* @@ -655,11 +675,13 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): version string, meaning we're inside a checked out source tree. """ GITS = ["git"] + TAG_PREFIX_REGEX = "*" if sys.platform == "win32": GITS = ["git.cmd", "git.exe"] + TAG_PREFIX_REGEX = r"\*" - out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root, - hide_stderr=True) + _, rc = runner(GITS, ["rev-parse", "--git-dir"], cwd=root, + hide_stderr=True) if rc != 0: if verbose: print("Directory %%s not under git control" %% root) @@ -667,15 +689,16 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty] # if there isn't one, this yields HEX[-dirty] (no NUM) - describe_out, rc = run_command(GITS, ["describe", "--tags", "--dirty", - "--always", "--long", - "--match", "%%s*" %% tag_prefix], - cwd=root) + describe_out, rc = runner(GITS, ["describe", "--tags", "--dirty", + "--always", "--long", + "--match", + "%%s%%s" %% (tag_prefix, TAG_PREFIX_REGEX)], + cwd=root) # --long was added in git-1.5.5 if describe_out is None: raise NotThisMethod("'git describe' failed") describe_out = describe_out.strip() - full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root) + full_out, rc = runner(GITS, ["rev-parse", "HEAD"], cwd=root) if full_out is None: raise NotThisMethod("'git rev-parse' failed") full_out = full_out.strip() @@ -685,6 +708,39 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): pieces["short"] = full_out[:7] # maybe improved later pieces["error"] = None + branch_name, rc = runner(GITS, ["rev-parse", "--abbrev-ref", "HEAD"], + cwd=root) + # --abbrev-ref was added in git-1.6.3 + if rc != 0 or branch_name is None: + raise NotThisMethod("'git rev-parse --abbrev-ref' returned error") + branch_name = branch_name.strip() + + if branch_name == "HEAD": + # If we aren't exactly on a branch, pick a branch which represents + # the current commit. If all else fails, we are on a branchless + # commit. + branches, rc = runner(GITS, ["branch", "--contains"], cwd=root) + # --contains was added in git-1.5.4 + if rc != 0 or branches is None: + raise NotThisMethod("'git branch --contains' returned error") + branches = branches.split("\n") + + # Remove the first line if we're running detached + if "(" in branches[0]: + branches.pop(0) + + # Strip off the leading "* " from the list of branches. + branches = [branch[2:] for branch in branches] + if "master" in branches: + branch_name = "master" + elif not branches: + branch_name = None + else: + # Pick the first branch that is returned. Good or bad. + branch_name = branches[0] + + pieces["branch"] = branch_name + # parse describe_out. 
It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] # TAG might have hyphens. git_describe = describe_out @@ -701,7 +757,7 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): # TAG-NUM-gHEX mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe) if not mo: - # unparseable. Maybe git-describe is misbehaving? + # unparsable. Maybe git-describe is misbehaving? pieces["error"] = ("unable to parse git-describe output: '%%s'" %% describe_out) return pieces @@ -726,13 +782,14 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): else: # HEX: no tags pieces["closest-tag"] = None - count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"], - cwd=root) + count_out, rc = runner(GITS, ["rev-list", "HEAD", "--count"], cwd=root) pieces["distance"] = int(count_out) # total number of commits # commit date: see ISO-8601 comment in git_versions_from_keywords() - date = run_command(GITS, ["show", "-s", "--format=%%ci", "HEAD"], - cwd=root)[0].strip() + date = runner(GITS, ["show", "-s", "--format=%%ci", "HEAD"], cwd=root)[0].strip() + # Use only the last line. Previous lines may contain GPG signature + # information. + date = date.splitlines()[-1] pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1) return pieces @@ -770,19 +827,67 @@ def render_pep440(pieces): return rendered -def render_pep440_pre(pieces): - """TAG[.post.devDISTANCE] -- No -dirty. +def render_pep440_branch(pieces): + """TAG[[.dev0]+DISTANCE.gHEX[.dirty]] . + + The ".dev0" means not master branch. Note that .dev0 sorts backwards + (a feature branch will appear "older" than the master branch). Exceptions: - 1: no tags. 0.post.devDISTANCE + 1: no tags. 0[.dev0]+untagged.DISTANCE.gHEX[.dirty] """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] + if pieces["distance"] or pieces["dirty"]: + if pieces["branch"] != "master": + rendered += ".dev0" + rendered += plus_or_dot(pieces) + rendered += "%%d.g%%s" %% (pieces["distance"], pieces["short"]) + if pieces["dirty"]: + rendered += ".dirty" + else: + # exception #1 + rendered = "0" + if pieces["branch"] != "master": + rendered += ".dev0" + rendered += "+untagged.%%d.g%%s" %% (pieces["distance"], + pieces["short"]) + if pieces["dirty"]: + rendered += ".dirty" + return rendered + + +def pep440_split_post(ver): + """Split pep440 version string at the post-release segment. + + Returns the release segments before the post-release and the + post-release version number (or -1 if no post-release segment is present). + """ + vc = str.split(ver, ".post") + return vc[0], int(vc[1] or 0) if len(vc) == 2 else None + + +def render_pep440_pre(pieces): + """TAG[.postN.devDISTANCE] -- No -dirty. + + Exceptions: + 1: no tags. 0.post0.devDISTANCE + """ + if pieces["closest-tag"]: if pieces["distance"]: - rendered += ".post.dev%%d" %% pieces["distance"] + # update the post release segment + tag_version, post_version = pep440_split_post(pieces["closest-tag"]) + rendered = tag_version + if post_version is not None: + rendered += ".post%%d.dev%%d" %% (post_version+1, pieces["distance"]) + else: + rendered += ".post0.dev%%d" %% (pieces["distance"]) + else: + # no commits, use the tag as the version + rendered = pieces["closest-tag"] else: # exception #1 - rendered = "0.post.dev%%d" %% pieces["distance"] + rendered = "0.post0.dev%%d" %% pieces["distance"] return rendered @@ -813,12 +918,41 @@ def render_pep440_post(pieces): return rendered +def render_pep440_post_branch(pieces): + """TAG[.postDISTANCE[.dev0]+gHEX[.dirty]] . 
+ + The ".dev0" means not master branch. + + Exceptions: + 1: no tags. 0.postDISTANCE[.dev0]+gHEX[.dirty] + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"] or pieces["dirty"]: + rendered += ".post%%d" %% pieces["distance"] + if pieces["branch"] != "master": + rendered += ".dev0" + rendered += plus_or_dot(pieces) + rendered += "g%%s" %% pieces["short"] + if pieces["dirty"]: + rendered += ".dirty" + else: + # exception #1 + rendered = "0.post%%d" %% pieces["distance"] + if pieces["branch"] != "master": + rendered += ".dev0" + rendered += "+g%%s" %% pieces["short"] + if pieces["dirty"]: + rendered += ".dirty" + return rendered + + def render_pep440_old(pieces): """TAG[.postDISTANCE[.dev0]] . The ".dev0" means dirty. - Eexceptions: + Exceptions: 1: no tags. 0.postDISTANCE[.dev0] """ if pieces["closest-tag"]: @@ -889,10 +1023,14 @@ def render(pieces, style): if style == "pep440": rendered = render_pep440(pieces) + elif style == "pep440-branch": + rendered = render_pep440_branch(pieces) elif style == "pep440-pre": rendered = render_pep440_pre(pieces) elif style == "pep440-post": rendered = render_pep440_post(pieces) + elif style == "pep440-post-branch": + rendered = render_pep440_post_branch(pieces) elif style == "pep440-old": rendered = render_pep440_old(pieces) elif style == "git-describe": @@ -928,7 +1066,7 @@ def get_versions(): # versionfile_source is the relative path from the top of the source # tree (where the .git directory might live) to this file. Invert # this to find the root from __file__. - for i in cfg.versionfile_source.split('/'): + for _ in cfg.versionfile_source.split('/'): root = os.path.dirname(root) except NameError: return {"version": "0+unknown", "full-revisionid": None, @@ -963,22 +1101,21 @@ def git_get_keywords(versionfile_abs): # _version.py. keywords = {} try: - f = open(versionfile_abs, "r") - for line in f.readlines(): - if line.strip().startswith("git_refnames ="): - mo = re.search(r'=\s*"(.*)"', line) - if mo: - keywords["refnames"] = mo.group(1) - if line.strip().startswith("git_full ="): - mo = re.search(r'=\s*"(.*)"', line) - if mo: - keywords["full"] = mo.group(1) - if line.strip().startswith("git_date ="): - mo = re.search(r'=\s*"(.*)"', line) - if mo: - keywords["date"] = mo.group(1) - f.close() - except EnvironmentError: + with open(versionfile_abs, "r") as fobj: + for line in fobj: + if line.strip().startswith("git_refnames ="): + mo = re.search(r'=\s*"(.*)"', line) + if mo: + keywords["refnames"] = mo.group(1) + if line.strip().startswith("git_full ="): + mo = re.search(r'=\s*"(.*)"', line) + if mo: + keywords["full"] = mo.group(1) + if line.strip().startswith("git_date ="): + mo = re.search(r'=\s*"(.*)"', line) + if mo: + keywords["date"] = mo.group(1) + except OSError: pass return keywords @@ -986,10 +1123,14 @@ def git_get_keywords(versionfile_abs): @register_vcs_handler("git", "keywords") def git_versions_from_keywords(keywords, tag_prefix, verbose): """Get version information from git keywords.""" - if not keywords: - raise NotThisMethod("no keywords at all, weird") + if "refnames" not in keywords: + raise NotThisMethod("Short version file found") date = keywords.get("date") if date is not None: + # Use only the last line. Previous lines may contain GPG signature + # information. + date = date.splitlines()[-1] + # git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant # datestamp. 
However we prefer "%ci" (which expands to an "ISO-8601 # -like" string, which we must then edit to make compliant), because @@ -1002,11 +1143,11 @@ def git_versions_from_keywords(keywords, tag_prefix, verbose): if verbose: print("keywords are unexpanded, not using") raise NotThisMethod("unexpanded keywords, not a git-archive tarball") - refs = set([r.strip() for r in refnames.strip("()").split(",")]) + refs = {r.strip() for r in refnames.strip("()").split(",")} # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of # just "foo-1.0". If we see a "tag: " prefix, prefer those. TAG = "tag: " - tags = set([r[len(TAG) :] for r in refs if r.startswith(TAG)]) + tags = {r[len(TAG) :] for r in refs if r.startswith(TAG)} if not tags: # Either we're using git < 1.8.3, or there really are no tags. We use # a heuristic: assume all version tags have a digit. The old git %d @@ -1015,7 +1156,7 @@ def git_versions_from_keywords(keywords, tag_prefix, verbose): # between branches and tags. By ignoring refnames without digits, we # filter out many common branch names like "release" and # "stabilization", as well as "HEAD" and "master". - tags = set([r for r in refs if re.search(r"\d", r)]) + tags = {r for r in refs if re.search(r"\d", r)} if verbose: print("discarding '%s', no digits" % ",".join(refs - tags)) if verbose: @@ -1024,6 +1165,11 @@ def git_versions_from_keywords(keywords, tag_prefix, verbose): # sorting will prefer e.g. "2.0" over "2.0rc1" if ref.startswith(tag_prefix): r = ref[len(tag_prefix) :] + # Filter out refs that exactly match prefix or that don't start + # with a number once the prefix is stripped (mostly a concern + # when prefix is '') + if not re.match(r"\d", r): + continue if verbose: print("picking %s" % r) return { @@ -1046,7 +1192,7 @@ def git_versions_from_keywords(keywords, tag_prefix, verbose): @register_vcs_handler("git", "pieces_from_vcs") -def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): +def git_pieces_from_vcs(tag_prefix, root, verbose, runner=run_command): """Get version from 'git describe' in the root of the source tree. This only gets called if the git-archive 'subst' keywords were *not* @@ -1054,10 +1200,12 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): version string, meaning we're inside a checked out source tree. 
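# The new re.match(r"\d", ...) guard above matters mostly when tag_prefix is
# empty; an assumed walk-through:
#
#     >>> refs = {"tag: docs", "HEAD", "master"}
#     >>> {r[len("tag: "):] for r in refs if r.startswith("tag: ")}
#     {'docs'}
#
# With tag_prefix "" the previous code would have reported "docs" as the
# version; the added guard now skips it, and the function falls back to the
# "0+unknown" result instead.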
""" GITS = ["git"] + TAG_PREFIX_REGEX = "*" if sys.platform == "win32": GITS = ["git.cmd", "git.exe"] + TAG_PREFIX_REGEX = r"\*" - out, rc = run_command( + _, rc = runner( GITS, ["rev-parse", "--git-dir"], cwd=root, hide_stderr=True ) if rc != 0: @@ -1067,7 +1215,7 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty] # if there isn't one, this yields HEX[-dirty] (no NUM) - describe_out, rc = run_command( + describe_out, rc = runner( GITS, [ "describe", @@ -1076,7 +1224,7 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): "--always", "--long", "--match", - "%s*" % tag_prefix, + "%s%s" % (tag_prefix, TAG_PREFIX_REGEX), ], cwd=root, ) @@ -1084,7 +1232,7 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): if describe_out is None: raise NotThisMethod("'git describe' failed") describe_out = describe_out.strip() - full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root) + full_out, rc = runner(GITS, ["rev-parse", "HEAD"], cwd=root) if full_out is None: raise NotThisMethod("'git rev-parse' failed") full_out = full_out.strip() @@ -1094,6 +1242,40 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): pieces["short"] = full_out[:7] # maybe improved later pieces["error"] = None + branch_name, rc = runner( + GITS, ["rev-parse", "--abbrev-ref", "HEAD"], cwd=root + ) + # --abbrev-ref was added in git-1.6.3 + if rc != 0 or branch_name is None: + raise NotThisMethod("'git rev-parse --abbrev-ref' returned error") + branch_name = branch_name.strip() + + if branch_name == "HEAD": + # If we aren't exactly on a branch, pick a branch which represents + # the current commit. If all else fails, we are on a branchless + # commit. + branches, rc = runner(GITS, ["branch", "--contains"], cwd=root) + # --contains was added in git-1.5.4 + if rc != 0 or branches is None: + raise NotThisMethod("'git branch --contains' returned error") + branches = branches.split("\n") + + # Remove the first line if we're running detached + if "(" in branches[0]: + branches.pop(0) + + # Strip off the leading "* " from the list of branches. + branches = [branch[2:] for branch in branches] + if "master" in branches: + branch_name = "master" + elif not branches: + branch_name = None + else: + # Pick the first branch that is returned. Good or bad. + branch_name = branches[0] + + pieces["branch"] = branch_name + # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] # TAG might have hyphens. git_describe = describe_out @@ -1110,7 +1292,7 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): # TAG-NUM-gHEX mo = re.search(r"^(.+)-(\d+)-g([0-9a-f]+)$", git_describe) if not mo: - # unparseable. Maybe git-describe is misbehaving? + # unparsable. Maybe git-describe is misbehaving? 
pieces["error"] = ( "unable to parse git-describe output: '%s'" % describe_out ) @@ -1138,15 +1320,16 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): else: # HEX: no tags pieces["closest-tag"] = None - count_out, rc = run_command( - GITS, ["rev-list", "HEAD", "--count"], cwd=root - ) + count_out, rc = runner(GITS, ["rev-list", "HEAD", "--count"], cwd=root) pieces["distance"] = int(count_out) # total number of commits # commit date: see ISO-8601 comment in git_versions_from_keywords() - date = run_command(GITS, ["show", "-s", "--format=%ci", "HEAD"], cwd=root)[ + date = runner(GITS, ["show", "-s", "--format=%ci", "HEAD"], cwd=root)[ 0 ].strip() + # Use only the last line. Previous lines may contain GPG signature + # information. + date = date.splitlines()[-1] pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1) return pieces @@ -1165,27 +1348,26 @@ def do_vcs_install(manifest_in, versionfile_source, ipy): if ipy: files.append(ipy) try: - me = __file__ - if me.endswith(".pyc") or me.endswith(".pyo"): - me = os.path.splitext(me)[0] + ".py" - versioneer_file = os.path.relpath(me) + my_path = __file__ + if my_path.endswith(".pyc") or my_path.endswith(".pyo"): + my_path = os.path.splitext(my_path)[0] + ".py" + versioneer_file = os.path.relpath(my_path) except NameError: versioneer_file = "versioneer.py" files.append(versioneer_file) present = False try: - f = open(".gitattributes", "r") - for line in f.readlines(): - if line.strip().startswith(versionfile_source): - if "export-subst" in line.strip().split()[1:]: - present = True - f.close() - except EnvironmentError: + with open(".gitattributes", "r") as fobj: + for line in fobj: + if line.strip().startswith(versionfile_source): + if "export-subst" in line.strip().split()[1:]: + present = True + break + except OSError: pass if not present: - f = open(".gitattributes", "a+") - f.write("%s export-subst\n" % versionfile_source) - f.close() + with open(".gitattributes", "a+") as fobj: + fobj.write("{0} export-subst\n".format(versionfile_source)) files.append(".gitattributes") run_command(GITS, ["add", "--"] + files) @@ -1199,7 +1381,7 @@ def versions_from_parentdir(parentdir_prefix, root, verbose): """ rootdirs = [] - for i in range(3): + for _ in range(3): dirname = os.path.basename(root) if dirname.startswith(parentdir_prefix): return { @@ -1209,9 +1391,8 @@ def versions_from_parentdir(parentdir_prefix, root, verbose): "error": None, "date": None, } - else: - rootdirs.append(root) - root = os.path.dirname(root) # up a level + rootdirs.append(root) + root = os.path.dirname(root) # up a level if verbose: print( @@ -1222,7 +1403,7 @@ def versions_from_parentdir(parentdir_prefix, root, verbose): SHORT_VERSION_PY = """ -# This file was generated by 'versioneer.py' (0.17) from +# This file was generated by 'versioneer.py' (0.21) from # revision-control system data, or from the parent directory name of an # unpacked source archive. Distribution tarballs contain a pre-generated copy # of this file. @@ -1244,7 +1425,7 @@ def versions_from_file(filename): try: with open(filename) as f: contents = f.read() - except EnvironmentError: + except OSError: raise NotThisMethod("unable to read _version.py") mo = re.search( r"version_json = '''\n(.*)''' # END VERSION_JSON", @@ -1305,19 +1486,71 @@ def render_pep440(pieces): return rendered -def render_pep440_pre(pieces): - """TAG[.post.devDISTANCE] -- No -dirty. +def render_pep440_branch(pieces): + """TAG[[.dev0]+DISTANCE.gHEX[.dirty]] . 
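# The pieces["branch"] value consumed by the new renderers below comes from
# the detached-HEAD fallback added earlier: "git rev-parse --abbrev-ref HEAD"
# prints "HEAD" when not on a branch, so "git branch --contains" is parsed
# instead. A sketch with assumed output:
#
#     >>> branches = ["* (HEAD detached at 1a2b3c4)", "  master", "  feature/x"]
#     >>> [b[2:] for b in branches[1:]]   # first line dropped, it contains "("
#     ['master', 'feature/x']
#
# "master" is preferred when present; otherwise the first remaining branch
# (or None, if there is none) is used.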
+ + The ".dev0" means not master branch. Note that .dev0 sorts backwards + (a feature branch will appear "older" than the master branch). Exceptions: - 1: no tags. 0.post.devDISTANCE + 1: no tags. 0[.dev0]+untagged.DISTANCE.gHEX[.dirty] """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] + if pieces["distance"] or pieces["dirty"]: + if pieces["branch"] != "master": + rendered += ".dev0" + rendered += plus_or_dot(pieces) + rendered += "%d.g%s" % (pieces["distance"], pieces["short"]) + if pieces["dirty"]: + rendered += ".dirty" + else: + # exception #1 + rendered = "0" + if pieces["branch"] != "master": + rendered += ".dev0" + rendered += "+untagged.%d.g%s" % (pieces["distance"], pieces["short"]) + if pieces["dirty"]: + rendered += ".dirty" + return rendered + + +def pep440_split_post(ver): + """Split pep440 version string at the post-release segment. + + Returns the release segments before the post-release and the + post-release version number (or -1 if no post-release segment is present). + """ + vc = str.split(ver, ".post") + return vc[0], int(vc[1] or 0) if len(vc) == 2 else None + + +def render_pep440_pre(pieces): + """TAG[.postN.devDISTANCE] -- No -dirty. + + Exceptions: + 1: no tags. 0.post0.devDISTANCE + """ + if pieces["closest-tag"]: if pieces["distance"]: - rendered += ".post.dev%d" % pieces["distance"] + # update the post release segment + tag_version, post_version = pep440_split_post( + pieces["closest-tag"] + ) + rendered = tag_version + if post_version is not None: + rendered += ".post%d.dev%d" % ( + post_version + 1, + pieces["distance"], + ) + else: + rendered += ".post0.dev%d" % (pieces["distance"]) + else: + # no commits, use the tag as the version + rendered = pieces["closest-tag"] else: # exception #1 - rendered = "0.post.dev%d" % pieces["distance"] + rendered = "0.post0.dev%d" % pieces["distance"] return rendered @@ -1348,12 +1581,41 @@ def render_pep440_post(pieces): return rendered +def render_pep440_post_branch(pieces): + """TAG[.postDISTANCE[.dev0]+gHEX[.dirty]] . + + The ".dev0" means not master branch. + + Exceptions: + 1: no tags. 0.postDISTANCE[.dev0]+gHEX[.dirty] + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"] or pieces["dirty"]: + rendered += ".post%d" % pieces["distance"] + if pieces["branch"] != "master": + rendered += ".dev0" + rendered += plus_or_dot(pieces) + rendered += "g%s" % pieces["short"] + if pieces["dirty"]: + rendered += ".dirty" + else: + # exception #1 + rendered = "0.post%d" % pieces["distance"] + if pieces["branch"] != "master": + rendered += ".dev0" + rendered += "+g%s" % pieces["short"] + if pieces["dirty"]: + rendered += ".dirty" + return rendered + + def render_pep440_old(pieces): """TAG[.postDISTANCE[.dev0]] . The ".dev0" means dirty. - Eexceptions: + Exceptions: 1: no tags. 
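# A short, assumed illustration of the helper and the reworked pre-release
# numbering (note that the helper actually returns None, not -1, when there
# is no ".post" segment):
#
#     >>> pep440_split_post("1.2.post3")
#     ('1.2', 3)
#     >>> pep440_split_post("1.2")
#     ('1.2', None)
#
# so a tag "1.2.post3" with two commits on top now renders as
# "1.2.post4.dev2", while a plain "1.2" renders as "1.2.post0.dev2".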
0.postDISTANCE[.dev0] """ if pieces["closest-tag"]: @@ -1426,10 +1688,14 @@ def render(pieces, style): if style == "pep440": rendered = render_pep440(pieces) + elif style == "pep440-branch": + rendered = render_pep440_branch(pieces) elif style == "pep440-pre": rendered = render_pep440_pre(pieces) elif style == "pep440-post": rendered = render_pep440_post(pieces) + elif style == "pep440-post-branch": + rendered = render_pep440_post_branch(pieces) elif style == "pep440-old": rendered = render_pep440_old(pieces) elif style == "git-describe": @@ -1538,8 +1804,12 @@ def get_version(): return get_versions()["version"] -def get_cmdclass(): - """Get the custom setuptools/distutils subclasses used by Versioneer.""" +def get_cmdclass(cmdclass=None): + """Get the custom setuptools/distutils subclasses used by Versioneer. + + If the package uses a different cmdclass (e.g. one from numpy), it + should be provide as an argument. + """ if "versioneer" in sys.modules: del sys.modules["versioneer"] # this fixes the "python setup.py develop" case (also 'install' and @@ -1553,9 +1823,9 @@ def get_cmdclass(): # parent is protected against the child's "import versioneer". By # removing ourselves from sys.modules here, before the child build # happens, we protect the child from the parent's versioneer too. - # Also see https://github.com/warner/python-versioneer/issues/52 + # Also see https://github.com/python-versioneer/python-versioneer/issues/52 - cmds = {} + cmds = {} if cmdclass is None else cmdclass.copy() # we add "version" to both distutils and setuptools from distutils.core import Command @@ -1598,7 +1868,9 @@ def run(self): # setup.py egg_info -> ? # we override different "build_py" commands for both environments - if "setuptools" in sys.modules: + if "build_py" in cmds: + _build_py = cmds["build_py"] + elif "setuptools" in sys.modules: from setuptools.command.build_py import build_py as _build_py else: from distutils.command.build_py import build_py as _build_py @@ -1620,6 +1892,35 @@ def run(self): cmds["build_py"] = cmd_build_py + if "build_ext" in cmds: + _build_ext = cmds["build_ext"] + elif "setuptools" in sys.modules: + from setuptools.command.build_ext import build_ext as _build_ext + else: + from distutils.command.build_ext import build_ext as _build_ext + + class cmd_build_ext(_build_ext): + def run(self): + root = get_root() + cfg = get_config_from_root(root) + versions = get_versions() + _build_ext.run(self) + if self.inplace: + # build_ext --inplace will only build extensions in + # build/lib<..> dir with no _version.py to write to. + # As in place builds will already have a _version.py + # in the module dir, we do not need to write one. + return + # now locate _version.py in the new build/ directory and replace + # it with an updated value + target_versionfile = os.path.join( + self.build_lib, cfg.versionfile_build + ) + print("UPDATING %s" % target_versionfile) + write_to_version_file(target_versionfile, versions) + + cmds["build_ext"] = cmd_build_ext + if "cx_Freeze" in sys.modules: # cx_freeze enabled? from cx_Freeze.dist import build_exe as _build_exe @@ -1658,10 +1959,7 @@ def run(self): del cmds["build_py"] if "py2exe" in sys.modules: # py2exe enabled? 
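# A hedged sketch of how the new cmdclass argument is meant to be used from
# a setup.py that already customises commands (MyBuildExt is a stand-in
# class, not something defined by this patch):
#
#     import versioneer
#     from setuptools import setup
#     from setuptools.command.build_ext import build_ext
#
#     class MyBuildExt(build_ext):          # hypothetical project command
#         pass
#
#     setup(
#         version=versioneer.get_version(),
#         cmdclass=versioneer.get_cmdclass({"build_ext": MyBuildExt}),
#     )
#
# get_cmdclass() copies the supplied mapping and subclasses any build_py or
# build_ext entries found there instead of replacing them with the stock
# setuptools/distutils commands.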
- try: - from py2exe.distutils_buildexe import py2exe as _py2exe # py3 - except ImportError: - from py2exe.build_exe import py2exe as _py2exe # py2 + from py2exe.distutils_buildexe import py2exe as _py2exe class cmd_py2exe(_py2exe): def run(self): @@ -1690,7 +1988,9 @@ def run(self): cmds["py2exe"] = cmd_py2exe # we override different "sdist" commands for both environments - if "setuptools" in sys.modules: + if "sdist" in cmds: + _sdist = cmds["sdist"] + elif "setuptools" in sys.modules: from setuptools.command.sdist import sdist as _sdist else: from distutils.command.sdist import sdist as _sdist @@ -1759,24 +2059,29 @@ def make_release_tree(self, base_dir, files): """ -INIT_PY_SNIPPET = """ +OLD_SNIPPET = """ from ._version import get_versions __version__ = get_versions()['version'] del get_versions """ +INIT_PY_SNIPPET = """ +from . import {0} +__version__ = {0}.get_versions()['version'] +""" + def do_setup(): - """Main VCS-independent setup function for installing Versioneer.""" + """Do main VCS-independent setup function for installing Versioneer.""" root = get_root() try: cfg = get_config_from_root(root) except ( - EnvironmentError, + OSError, configparser.NoSectionError, configparser.NoOptionError, ) as e: - if isinstance(e, (EnvironmentError, configparser.NoSectionError)): + if isinstance(e, (OSError, configparser.NoSectionError)): print( "Adding sample versioneer config to setup.cfg", file=sys.stderr ) @@ -1804,12 +2109,18 @@ def do_setup(): try: with open(ipy, "r") as f: old = f.read() - except EnvironmentError: + except OSError: old = "" - if INIT_PY_SNIPPET not in old: + module = os.path.splitext(os.path.basename(cfg.versionfile_source))[0] + snippet = INIT_PY_SNIPPET.format(module) + if OLD_SNIPPET in old: + print(" replacing boilerplate in %s" % ipy) + with open(ipy, "w") as f: + f.write(old.replace(OLD_SNIPPET, snippet)) + elif snippet not in old: print(" appending to %s" % ipy) with open(ipy, "a") as f: - f.write(INIT_PY_SNIPPET) + f.write(snippet) else: print(" %s unmodified" % ipy) else: @@ -1828,7 +2139,7 @@ def do_setup(): if line.startswith("include "): for include in line.split()[1:]: simple_includes.add(include) - except EnvironmentError: + except OSError: pass # That doesn't cover everything MANIFEST.in can do # (http://docs.python.org/2/distutils/sourcedist.html#commands), so
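# With the new template, the __init__.py boilerplate is generated from the
# configured versionfile_source. Assuming, for illustration, a
# versionfile_source of "src/ecdsa/_version.py", do_setup() would write:
#
#     from . import _version
#     __version__ = _version.get_versions()['version']
#
# and it rewrites the old get_versions()-style boilerplate in place when it
# finds it, instead of only appending.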