pax_global_header 0000666 0000000 0000000 00000000064 14603035227 0014514 g ustar 00root root 0000000 0000000 52 comment=bc805ed85d711873232873af8a47a8effd224fa3
contourpy-1.2.1/ 0000775 0000000 0000000 00000000000 14603035227 0013557 5 ustar 00root root 0000000 0000000 contourpy-1.2.1/.cirrus.yml 0000664 0000000 0000000 00000004537 14603035227 0015700 0 ustar 00root root 0000000 0000000 linux_aarch64_test_task:
name: "Cirrus linux aarch64 ${PYTHON_VERSION} ${IMAGE_SUFFIX}"
only_if: $CIRRUS_BRANCH != "main"
arm_container:
# https://hub.docker.com/_/python/
image: python:${PYTHON_VERSION}-${IMAGE_SUFFIX}
matrix:
- env:
PYTHON_VERSION: "3.11"
IMAGE_SUFFIX: slim
BUILD_NUMPY: 0
TEST_NO_IMAGES: 0
#- env:
# PYTHON_VERSION: "3.11"
# IMAGE_SUFFIX: alpine
# BUILD_NUMPY: 1
# TEST_NO_IMAGES: 1
os_dependencies_script: |
if [[ "$IMAGE_SUFFIX" != "alpine" ]];
then
apt update;
apt install -yy g++;
else
apk update;
apk add build-base;
fi
python_venv_script: |
which python
python --version
python -m pip install --upgrade pip
python -m venv venv
source venv/bin/activate
# Build numpy from sdist when the matrix sets BUILD_NUMPY=1 (e.g. alpine,
# where no binary wheel exists). `|` block scalar keeps each shell line
# separate, consistent with every other *_script key in this file.
install_numpy_from_source_script: |
  if [[ "$BUILD_NUMPY" == "1" ]];
  then
      python -m pip install -v --no-binary=numpy numpy;
  fi
# Install contourpy with the appropriate test extras.
# NOTE: compare against "1" explicitly — the matrix sets TEST_NO_IMAGES: 0,
# and `[[ "$TEST_NO_IMAGES" ]]` is true for any non-empty string including
# "0", which wrongly selected the no-images extras. Matches the
# BUILD_NUMPY check style above.
install_contourpy_script: |
  if [[ "$TEST_NO_IMAGES" == "1" ]];
  then
      python -m pip install -v .[test-no-images] -Cbuilddir=build;
  else
      python -m pip install -v .[test] -Cbuilddir=build;
  fi
  python -m pip list
  python -c "from contourpy.util import build_config; from pprint import pprint; pprint(build_config())"
# Run the test suite, skipping image-comparison tests when requested.
# NOTE: compare against "1" explicitly — `[[ "$TEST_NO_IMAGES" ]]` is true
# for the non-empty string "0" set in the matrix (TEST_NO_IMAGES: 0), which
# wrongly skipped image tests.
run_tests_script: |
  if [[ "$TEST_NO_IMAGES" == "1" ]];
  then
      python -m pytest -v tests/ -n 2 -k "not image";
  else
      python -m pytest -v tests/ -n 2;
  fi
macos_arm64_test_task:
name: "Cirrus macos arm64 ${PYTHON_VERSION}"
only_if: $CIRRUS_BRANCH != "main"
macos_instance:
# https://github.com/cirruslabs/macos-image-templates
image: ghcr.io/cirruslabs/macos-monterey-xcode:14
matrix:
- env:
PYTHON_VERSION: "3.10"
os_dependencies_script: |
brew install python@${PYTHON_VERSION}
echo "PATH=/opt/homebrew/opt/python@${PYTHON_VERSION}/libexec/bin:$PATH" >> $CIRRUS_ENV
python_venv_script: |
env | grep PATH
which python
python --version
python -m pip install --upgrade pip
python -m venv venv
source venv/bin/activate
install_contourpy_script: |
python -m pip install -v .[test] -Cbuilddir=build
python -m pip list
python -c "from contourpy.util import build_config; from pprint import pprint; pprint(build_config())"
run_tests_script: |
python -m pytest -v tests/ -n 4
contourpy-1.2.1/.github/ 0000775 0000000 0000000 00000000000 14603035227 0015117 5 ustar 00root root 0000000 0000000 contourpy-1.2.1/.github/CODE_OF_CONDUCT.md 0000664 0000000 0000000 00000012567 14603035227 0017731 0 ustar 00root root 0000000 0000000 # Contributor Covenant Code of Conduct
## Our Pledge
We as members, contributors, and leaders pledge to make participation in our community a harassment-free experience for everyone, regardless of age, body size, visible or invisible disability, ethnicity, sex characteristics, gender identity and expression, level of experience, education, socio-economic status, nationality, personal appearance, race, religion, or sexual identity and orientation.
We pledge to act and interact in ways that contribute to an open, welcoming, diverse, inclusive, and healthy community.
## Our Standards
Examples of behavior that contributes to a positive environment for our community include:
* Demonstrating empathy and kindness toward other people
* Being respectful of differing opinions, viewpoints, and experiences
* Giving and gracefully accepting constructive feedback
* Accepting responsibility and apologizing to those affected by our mistakes, and learning from the experience
* Focusing on what is best not just for us as individuals, but for the overall community
Examples of unacceptable behavior include:
* The use of sexualized language or imagery, and sexual attention or advances of any kind
* Trolling, insulting or derogatory comments, and personal or political attacks
* Public or private harassment
* Publishing others' private information, such as a physical or email address, without their explicit permission
* Other conduct which could reasonably be considered inappropriate in a professional setting
## Enforcement Responsibilities
Community leaders are responsible for clarifying and enforcing our standards of acceptable behavior and will take appropriate and fair corrective action in response to any behavior that they deem inappropriate, threatening, offensive, or harmful.
Community leaders have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct, and will communicate reasons for moderation decisions when appropriate.
## Scope
This Code of Conduct applies within all community spaces, and also applies when an individual is officially representing the community in public spaces. Examples of representing our community include using an official e-mail address, posting via an official social media account, or acting as an appointed representative at an online or offline event.
## Enforcement
Instances of abusive, harassing, or otherwise unacceptable behavior may be reported to the community leaders responsible for enforcement at [ianthomas23@gmail.com](mailto:ianthomas23@gmail.com). All complaints will be reviewed and investigated promptly and fairly.
All community leaders are obligated to respect the privacy and security of the reporter of any incident.
## Enforcement Guidelines
Community leaders will follow these Community Impact Guidelines in determining the consequences for any action they deem in violation of this Code of Conduct:
### 1. Correction
**Community Impact**: Use of inappropriate language or other behavior deemed unprofessional or unwelcome in the community.
**Consequence**: A private, written warning from community leaders, providing clarity around the nature of the violation and an explanation of why the behavior was inappropriate. A public apology may be requested.
### 2. Warning
**Community Impact**: A violation through a single incident or series of actions.
**Consequence**: A warning with consequences for continued behavior. No interaction with the people involved, including unsolicited interaction with those enforcing the Code of Conduct, for a specified period of time. This includes avoiding interactions in community spaces as well as external channels like social media. Violating these terms may lead to a temporary or permanent ban.
### 3. Temporary Ban
**Community Impact**: A serious violation of community standards, including sustained inappropriate behavior.
**Consequence**: A temporary ban from any sort of interaction or public communication with the community for a specified period of time. No public or private interaction with the people involved, including unsolicited interaction with those enforcing the Code of Conduct, is allowed during this period. Violating these terms may lead to a permanent ban.
### 4. Permanent Ban
**Community Impact**: Demonstrating a pattern of violation of community standards, including sustained inappropriate behavior, harassment of an individual, or aggression toward or disparagement of classes of individuals.
**Consequence**: A permanent ban from any sort of public interaction within the community.
## Attribution
This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 2.0, available at [https://www.contributor-covenant.org/version/2/0/code_of_conduct.html][v2.0].
Community Impact Guidelines were inspired by [Mozilla's code of conduct enforcement ladder][Mozilla CoC].
For answers to common questions about this code of conduct, see the FAQ at [https://www.contributor-covenant.org/faq][FAQ]. Translations are available at [https://www.contributor-covenant.org/translations][translations].
[homepage]: https://www.contributor-covenant.org
[v2.0]: https://www.contributor-covenant.org/version/2/0/code_of_conduct.html
[Mozilla CoC]: https://github.com/mozilla/diversity
[FAQ]: https://www.contributor-covenant.org/faq
[translations]: https://www.contributor-covenant.org/translations
contourpy-1.2.1/.github/SECURITY.md 0000664 0000000 0000000 00000000465 14603035227 0016715 0 ustar 00root root 0000000 0000000 # Security Policy
## Supported Versions
| Version | Supported |
| ------- | ------------------ |
| 1.0.0 | :white_check_mark: |
| < 1.0 | :x: |
## Reporting a Vulnerability
To report a security vulnerability, please email [ianthomas23@gmail.com](mailto:ianthomas23@gmail.com).
contourpy-1.2.1/.github/codecov.yml 0000664 0000000 0000000 00000000416 14603035227 0017265 0 ustar 00root root 0000000 0000000 codecov:
# Wait for both CI coverage uploads (Python + C++) before notifying.
notify:
  after_n_builds: 2
  # Canonical YAML 1.2 boolean; `no` is a YAML 1.1 truthy keyword that
  # generic parsers read as a plain string.
  wait_for_ci: false
# No PR comment from codecov.
comment: false
coverage:
  range: 85..95
  status:
    patch:
      default:
        target: 50%
    project:
      default:
        target: auto
        threshold: 1%
github_checks:
  annotations: false
contourpy-1.2.1/.github/workflows/ 0000775 0000000 0000000 00000000000 14603035227 0017154 5 ustar 00root root 0000000 0000000 contourpy-1.2.1/.github/workflows/build_wheels.yml 0000664 0000000 0000000 00000004750 14603035227 0022353 0 ustar 00root root 0000000 0000000 name: Build binary wheels and sdist
on:
push:
tags:
- v*
workflow_dispatch:
defaults:
run:
shell: bash
jobs:
build_wheels:
name: ${{ matrix.arch }} wheels on ${{ matrix.os }}
runs-on: ${{ matrix.os }}
#env:
#MACOSX_DEPLOYMENT_TARGET: "10.9"
strategy:
fail-fast: false
matrix:
include:
- os: ubuntu-20.04
arch: aarch64
- os: ubuntu-20.04
arch: ppc64le
- os: ubuntu-20.04
arch: s390x
- os: ubuntu-20.04
arch: x86_64
- os: macOS-11
arch: arm64
- os: macOS-11
arch: x86_64
- os: windows-2019
arch: AMD64
- os: windows-2019
arch: x86
steps:
- name: Checkout source
uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Set up QEMU
if: runner.os == 'Linux'
uses: docker/setup-qemu-action@v3
with:
platforms: all
- name: Setup MSVC (32-bit)
if: ${{ matrix.arch == 'x86' }}
uses: bus1/cabuild/action/msdevshell@v1
with:
architecture: 'x86'
- name: Fix PATH on win32
# Avoid this in GHA: "ERROR: Found GNU link.exe instead of MSVC link.exe"
if: ${{ matrix.arch == 'x86' }}
run: |
rm /c/Program\ Files/Git/usr/bin/link.EXE
- name: Build wheels
uses: pypa/cibuildwheel@v2.16.5
env:
CIBW_ARCHS: ${{ matrix.arch }}
- uses: actions/upload-artifact@v4
with:
name: wheels_${{ matrix.os }}_${{ matrix.arch }}
path: ./wheelhouse/*.whl
build_sdist:
name: Build sdist
runs-on: ubuntu-latest
steps:
- name: Checkout source
uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Setup Python
uses: actions/setup-python@v5
with:
python-version: '3.10'
- name: Install dependencies
run: |
python -m pip install build
- name: Build sdist
run: |
python -m build --sdist
- uses: actions/upload-artifact@v4
with:
name: sdist
path: dist/*.tar.gz
merge:
name: Merge build artifacts
runs-on: ubuntu-latest
needs: [build_wheels, build_sdist]
steps:
- name: Merge Artifacts
uses: actions/upload-artifact/merge@v4
with:
name: build_wheels_artifacts
delete-merged: true
contourpy-1.2.1/.github/workflows/test.yml 0000664 0000000 0000000 00000037223 14603035227 0020665 0 ustar 00root root 0000000 0000000 name: Test
on:
pull_request:
branches:
- main
- v[0-9]+.[0-9]+.x
push:
branches:
- main
- v[0-9]+.[0-9]+.x
workflow_dispatch:
schedule:
- cron: "42 01 * * SUN"
defaults:
run:
shell: bash
jobs:
pre-commit:
name: pre-commit
runs-on: ubuntu-latest
steps:
- name: Checkout source
uses: actions/checkout@v4
- name: Setup Python
uses: actions/setup-python@v5
with:
python-version: "3.11"
- name: Run pre-commit
uses: pre-commit/action@v3.0.0
codebase:
name: codebase
runs-on: ubuntu-latest
strategy:
matrix:
python-version: ["3.11"]
steps:
- name: Checkout source
uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Setup Python ${{ matrix.python-version }}
uses: actions/setup-python@v5
with:
python-version: ${{ matrix.python-version }}
- name: Install contourpy
run: |
python -m pip install --upgrade pip
python -m pip install -v .[mypy,test]
- name: Install cppcheck
run: |
CPPCHECK_VERSION=2.11
CPPCHECK_TGZ=$CPPCHECK_VERSION.tar.gz
cd $RUNNER_TEMP
wget --no-verbose https://github.com/danmar/cppcheck/archive/refs/tags/$CPPCHECK_TGZ
tar xzf $CPPCHECK_TGZ
cd cppcheck-$CPPCHECK_VERSION
sudo make install MATCHCOMPILER=yes FILESDIR=/usr/share/cppcheck CXXFLAGS="-O2 -DNDEBUG" -j 2
- name: Smoke test
run: |
python -m pip list
python -c "from contourpy.util import build_config; from pprint import pprint; pprint(build_config())"
python -c "import contourpy as c; print('NDEBUG', c._contourpy.NDEBUG)"
- name: Run tests
run: |
python -m pytest -v -n auto --color=yes tests/test_codebase.py
test:
name: "${{ matrix.name }} ${{ matrix.python-version }} ${{ matrix.os }}"
runs-on: ${{ matrix.os }}
env:
# Required version of chromium used for Bokeh image tests.
CHROME_VER: "118.0.5993.88"
CHROME_REV: "chromium_2670"
strategy:
fail-fast: false
matrix:
os: [ubuntu-latest, macos-latest]
python-version: ["3.9", "3.10", "3.11", "3.12"]
name: ["Test"]
short-name: ["test"]
include:
- os: macos-14
python-version: "3.11"
name: "Test"
short-name: "test"
test-no-images: true
- os: macos-14
python-version: "3.12"
name: "Test"
short-name: "test"
test-no-images: true
# windows run exclude image tests
- os: windows-latest
python-version: "3.9"
name: "Test"
short-name: "test"
test-no-images: true
- os: windows-latest
python-version: "3.10"
name: "Test"
short-name: "test"
test-no-images: true
- os: windows-latest
python-version: "3.11"
name: "Test"
short-name: "test"
test-no-images: true
- os: windows-latest
python-version: "3.12"
name: "Test"
short-name: "test"
test-no-images: true
# Debug build including Python and C++ coverage.
- os: ubuntu-latest
python-version: "3.11"
name: "Test debug with coverage"
short-name: "test-debug"
coverage-files: "coverage.lcov,coverage.cpp"
debug: true
# Bokeh and text tests with Python (not C++) coverage.
- os: ubuntu-latest
python-version: "3.11"
name: "Test bokeh and text tests with coverage"
short-name: "test-bokeh"
coverage-files: "coverage.lcov"
test-text: true
# Test against numpy debug build.
- os: ubuntu-latest
python-version: "3.11"
name: "Test numpy debug"
short-name: "test-numpy-debug"
build-numpy-debug: true
# Test against earliest supported numpy
- os: ubuntu-latest
python-version: "3.9"
name: "Test earliest numpy"
short-name: "test-earliest-numpy"
extra-install-args: "numpy==1.20"
# Compile using C++11.
- os: ubuntu-latest
python-version: "3.11"
name: "Test C++11"
short-name: "test-c++11"
extra-install-args: "-Csetup-args=-Dcpp_std=c++11"
# PyPy only tested on ubuntu for speed, exclude big tests.
- os: ubuntu-latest
python-version: "pypy3.9"
name: "Test"
short-name: "test"
test-no-big: true
# Win32 test.
- os: windows-latest
python-version: "3.11"
name: "Win32"
short-name: "test-win32"
win32: true
test-no-images: true
# Test against matplotlib and numpy nightly wheels.
- os: ubuntu-latest
python-version: "3.12"
name: "Nightly wheels"
short-name: "test-nightlies"
nightly-wheels: true
- os: macos-latest
python-version: "3.12"
name: "Nightly wheels"
short-name: "test-nightlies"
nightly-wheels: true
- os: windows-latest
python-version: "3.12"
name: "Nightly wheels"
short-name: "test-nightlies"
nightly-wheels: true
test-no-images: true
steps:
- name: Checkout source
uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Setup Python ${{ matrix.python-version }}
uses: actions/setup-python@v5
with:
python-version: ${{ matrix.python-version }}
architecture: ${{ matrix.win32 && 'x86' || 'x64' }}
- name: Setup MSVC (32-bit)
if: matrix.win32
uses: bus1/cabuild/action/msdevshell@v1
with:
architecture: 'x86'
- name: Fix PATH on win32
# Avoid this in GHA: "ERROR: Found GNU link.exe instead of MSVC link.exe"
if: matrix.win32
run: |
rm /c/Program\ Files/Git/usr/bin/link.EXE
- name: Install OS dependencies
if: matrix.debug
run: |
sudo apt update -yy
sudo apt install -yy lcov
- name: Install chromium for Bokeh tests
if: matrix.test-text
run: |
if [[ "$(chromium --version | cut -d' ' -f2)" = "$CHROME_VER" ]]; then
echo "Using pre-installed version of chromium"
else
URL=https://github.com/bokeh/chromium/raw/main/linux/$CHROME_VER
wget --no-verbose $URL/$CHROME_REV.assert
wget --no-verbose $URL/$CHROME_REV.snap
ls -l $CHROME_REV.*
sudo snap ack $CHROME_REV.assert
sudo snap install $CHROME_REV.snap
snap list chromium
snap info chromium
fi
which chromium
chromium --version
which chromedriver
chromedriver --version
# Build numpy from its sdist instead of using a binary wheel.
- name: Build and install numpy from sdist
  if: matrix.build-numpy
  run: |
    # Fixed: a stray trailing double quote on this command produced an
    # unterminated string and broke the shell step.
    python -m pip install -v --no-binary=numpy numpy
- name: Build and install numpy from sdist with debug asserts enabled
if: matrix.build-numpy-debug
run: |
python -m pip install -v --no-binary=numpy numpy -Csetup-args=-Dbuildtype=debug
- name: Pre-install Python dependencies
run: |
python -m pip install --upgrade pip
if [[ "${{ matrix.debug }}" != "" ]] || [[ "${{ matrix.coverage-files }}" != "" ]]
then
# Install requirements when not using build isolation.
python -m pip install -r build_requirements.txt ninja
fi
python -m pip list
- name: Install contourpy
run: |
if [[ "${{ matrix.debug }}" != "" ]]
then
echo "Install contourpy in debug editable mode with coverage"
python -m pip install -ve .[test] --no-build-isolation -Csetup-args=-Dbuildtype=debug -Csetup-args=-Db_coverage=true -Cbuilddir=build
elif [[ "${{ matrix.coverage-files }}" != "" ]]
then
echo "Install contourpy in editable mode with bokeh dependencies"
python -m pip install -ve .[bokeh,test] --no-build-isolation -Cbuilddir=build
elif [[ "${{ matrix.test-no-images }}" != "" ]]
then
echo "Install contourpy with non-image-generating test dependencies"
python -m pip install -v .[test-no-images]
else
echo "Install contourpy with standard test dependencies"
python -m pip install -v .[test] ${{ matrix.extra-install-args }}
fi
- name: Install nightly wheels
if: matrix.nightly-wheels
run: |
python -m pip install --pre --upgrade --no-deps --extra-index-url https://pypi.anaconda.org/scientific-python-nightly-wheels/simple matplotlib numpy
- name: Install numpy 2 pre-release
run: |
if [[ "${{ matrix.short-name }}" == "test" ]]
then
# This is temporary until there are full releases of numpy 2 and matplotlib supporting numpy 2
python -m pip install --pre --upgrade --no-deps --extra-index-url https://pypi.anaconda.org/scientific-python-nightly-wheels/simple matplotlib
python -m pip install --pre --upgrade --no-deps numpy
fi
- name: Smoke test
run: |
python -m pip list
python -c "from contourpy.util import build_config; from pprint import pprint; pprint(build_config())"
python -c "import contourpy as c; print('NDEBUG', c._contourpy.NDEBUG)"
- name: Run tests
run: |
if [[ "${{ matrix.debug }}" != "" ]]
then
echo "Run normal tests with coverage"
python -m pytest -v -n auto --color=yes tests/ --cov=lib --cov-report=lcov
elif [[ "${{ matrix.test-text }}" != "" ]]
then
echo "Run normal and text tests with coverage"
python -m pytest -v -n auto --color=yes -rP tests/test_bokeh_renderer.py tests/test_renderer.py --runtext --driver-path=/snap/bin/chromium.chromedriver --cov=lib --cov-report=lcov
elif [[ "${{ matrix.test-no-images }}" != "" ]]
then
echo "Run only tests that do not generate images"
python -m pytest -v -n auto --color=yes tests/ -k "not image"
elif [[ "${{ matrix.test-no-big }}" != "" ]]
then
echo "Run all tests except big ones"
python -m pytest -v -n auto --color=yes tests/ -k "not big"
else
echo "Run all tests"
python -m pytest -v -n auto --color=yes tests/
fi
- name: Collect C++ coverage
if: matrix.debug
run: |
lcov --output-file coverage.cpp --capture --directory build
lcov --output-file coverage.cpp --extract coverage.cpp $PWD/src/"*"
- name: Upload coverage
if: matrix.coverage-files
uses: codecov/codecov-action@v3
with:
files: ${{ matrix.coverage-files }}
verbose: true
- name: Collect test image failures
if: always()
run: |
if [[ -e result_images ]]
then
DIR="test-artifacts/${{ matrix.os }}_${{ matrix.python-version }}_${{ github.run_id }}"
mkdir -p ${DIR}
mv result_images/* ${DIR}/
fi
- name: Upload test artifacts
if: always()
uses: actions/upload-artifact@v4
with:
name: ${{ matrix.short-name }}_${{ matrix.os }}_${{ matrix.python-version }}
path: result_images/
test-in-docker:
# In-docker tests are either emulated hardware or musllinux
name: In docker ${{ matrix.arch }} ${{ matrix.manylinux_version }}
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
include:
# musllinux x86_64.
- arch: x86_64
manylinux_version: musllinux
image: musllinux_1_1_x86_64
venv: venv
test: test
# musllinux aarch64.
- arch: aarch64
manylinux_version: musllinux
image: musllinux_1_1_aarch64
venv: venv
test: test-no-images
# ppc64le and s390x: dependencies are conda packages.
- arch: ppc64le
manylinux_version: manylinux2014
image: manylinux2014_ppc64le
venv: conda
test: test
- arch: s390x
manylinux_version: manylinux2014
image: manylinux2014_s390x
venv: conda
test: test
steps:
- name: Checkout source
uses: actions/checkout@v4
with:
fetch-depth: 0
# QEMU is only needed for emulated (non-x86_64) architectures.
# Fixed: `if: ${{ matrix.arch }} != "x86_64"` mixes an expression with
# literal text — once `${{ }}` appears, the rest is not evaluated, so the
# result was a non-empty string and the step always ran. The whole
# condition must be a single expression.
- name: Set up QEMU
  if: matrix.arch != 'x86_64'
  uses: docker/setup-qemu-action@v3
  with:
    platforms: all
- name: Run inside docker
uses: addnab/docker-run-action@v3
with:
image: quay.io/pypa/${{ matrix.image }}:latest
options: -v ${{ github.workspace }}:/work -e ARCH=${{ matrix.arch }} -e VENV=${{ matrix.venv}} -e TEST=${{ matrix.test }}
shell: bash
run: |
echo "-------------------- start --------------------"
set -eu
uname -a
cd /work
if [[ $VENV == "venv" ]]
then
echo "==> Create virtual environment"
/opt/python/cp311-cp311/bin/python -m venv venv
. venv/bin/activate
which python
python --version
else
echo "==> Install conda"
cd /tmp
curl -LO "http://repo.continuum.io/miniconda/Miniconda3-latest-Linux-$ARCH.sh"
bash Miniconda3-latest-Linux-$ARCH.sh -p /work/venv -b
rm Miniconda3-latest-Linux-$ARCH.sh
cd /work
echo "==> Activate conda in this shell"
. /work/venv/etc/profile.d/conda.sh
echo "==> Create and activate conda environment"
conda create -n my_env -q python=3.11
conda activate my_env
echo "==> Install conda dependencies"
conda install -q numpy matplotlib Pillow
conda list
fi
echo "==> Upgrade pip"
python -m pip install --upgrade pip
echo "==> Install contourpy with test dependencies"
python -m pip install -v .[$TEST]
python -m pip list
python -c "from contourpy.util import build_config; from pprint import pprint; pprint(build_config())"
if [[ $TEST == "test-no-images" ]]
then
echo "==> Run non-image and non-big tests"
python -m pytest -v -n auto --color=yes tests/ -k "not (big or image)"
else
echo "==> Run tests except 'big' ones as on emulated hardware"
python -m pytest -v -n auto --color=yes tests/ -k "not big"
fi
echo "-------------------- end --------------------"
- name: Collect test image failures
if: always()
run: |
if [[ -e result_images ]]
then
DIR="test-artifacts/docker_${{ matrix.arch }}_${{ matrix.manylinux_version }}"
mkdir -p ${DIR}
mv result_images/* ${DIR}/
fi
- name: Upload test artifacts
if: always()
uses: actions/upload-artifact@v4
with:
name: docker_${{ matrix.arch }}_${{ matrix.manylinux_version }}
path: result_images/
contourpy-1.2.1/.gitignore 0000664 0000000 0000000 00000000371 14603035227 0015550 0 ustar 00root root 0000000 0000000 __pycache__/
build/
dist/
contourpy.egg-info/
result_images/
wheelhouse/
.mypy_cache/
.pytest_cache/
.ruff_cache/
*.so
*.pyd
.vscode/
.asv/
.mesonpy-native-file.ini
generated/
benchmarks/*.png
benchmarks/*.svg
build_config.py
.coverage
docs/_build/
contourpy-1.2.1/.pre-commit-config.yaml 0000664 0000000 0000000 00000001567 14603035227 0020051 0 ustar 00root root 0000000 0000000 exclude: \.(png|svg)$
repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v4.5.0
hooks:
- id: check-added-large-files
- id: check-builtin-literals
- id: check-case-conflict
- id: check-docstring-first
- id: check-merge-conflict
- id: check-toml
- id: check-yaml
- id: debug-statements
- id: detect-private-key
- id: end-of-file-fixer
- id: mixed-line-ending
- id: trailing-whitespace
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.1.9
hooks:
- id: ruff
args: [--fix]
- repo: https://github.com/codespell-project/codespell
rev: v2.2.6
hooks:
- id: codespell
additional_dependencies:
- tomli
- repo: https://github.com/asottile/pyupgrade
rev: v3.15.0
hooks:
- id: pyupgrade
ci:
autofix_prs: false
contourpy-1.2.1/.readthedocs.yaml 0000664 0000000 0000000 00000000375 14603035227 0017013 0 ustar 00root root 0000000 0000000 version: 2
build:
os: ubuntu-22.04
tools:
python: "3.11"
python:
install:
- method: pip
path: .
extra_requirements:
- bokeh
- docs
- test
sphinx:
configuration: docs/conf.py
fail_on_warning: true
contourpy-1.2.1/LICENSE 0000664 0000000 0000000 00000002776 14603035227 0014600 0 ustar 00root root 0000000 0000000 BSD 3-Clause License
Copyright (c) 2021-2024, ContourPy Developers.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
3. Neither the name of the copyright holder nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
contourpy-1.2.1/README.md 0000664 0000000 0000000 00000004126 14603035227 0015041 0 ustar 00root root 0000000 0000000
ContourPy is a Python library for calculating contours of 2D quadrilateral grids. It is written in C++11 and wrapped using pybind11.
It contains the 2005 and 2014 algorithms used in Matplotlib as well as a newer algorithm that includes more features and is available in both serial and multithreaded versions. It provides an easy way for Python libraries to use contouring algorithms without having to include Matplotlib as a dependency.
* **Documentation**: https://contourpy.readthedocs.io
* **Source code**: https://github.com/contourpy/contourpy
| | |
| --- | --- |
| Latest release | [](https://pypi.python.org/pypi/contourpy) [](https://anaconda.org/conda-forge/contourpy) [](https://anaconda.org/anaconda/contourpy) |
| Downloads | [](https://pepy.tech/project/contourpy) [](https://anaconda.org/conda-forge/contourpy) [](https://anaconda.org/anaconda/contourpy) |
| Python version | [](https://pypi.org/project/contourpy/) |
| Coverage | [](https://app.codecov.io/gh/contourpy/contourpy) |
contourpy-1.2.1/README_simple.md 0000664 0000000 0000000 00000003637 14603035227 0016420 0 ustar 00root root 0000000 0000000
ContourPy is a Python library for calculating contours of 2D quadrilateral grids. It is written in C++11 and wrapped using pybind11.
It contains the 2005 and 2014 algorithms used in Matplotlib as well as a newer algorithm that includes more features and is available in both serial and multithreaded versions. It provides an easy way for Python libraries to use contouring algorithms without having to include Matplotlib as a dependency.
* **Documentation**: https://contourpy.readthedocs.io
* **Source code**: https://github.com/contourpy/contourpy
| | |
| --- | --- |
| Latest release | [](https://pypi.python.org/pypi/contourpy) [](https://anaconda.org/conda-forge/contourpy) [](https://anaconda.org/anaconda/contourpy) |
| Downloads | [](https://pepy.tech/project/contourpy) [](https://anaconda.org/conda-forge/contourpy) [](https://anaconda.org/anaconda/contourpy) |
| Python version | [](https://pypi.org/project/contourpy/) |
| Coverage | [](https://app.codecov.io/gh/contourpy/contourpy) |
contourpy-1.2.1/benchmarks/ 0000775 0000000 0000000 00000000000 14603035227 0015674 5 ustar 00root root 0000000 0000000 contourpy-1.2.1/benchmarks/README.md 0000664 0000000 0000000 00000002476 14603035227 0017164 0 ustar 00root root 0000000 0000000 Benchmarking
============
`contourpy` uses ASV (https://asv.readthedocs.io) for benchmarking.
Installing ASV
--------------
ASV creates virtualenvs to run benchmarks in. Before using it you need to
```
pip install asv==0.4.2 virtualenv
```
or the `conda` equivalent.
Running benchmarks
------------------
To run all benchmarks against the default `main` branch:
```
cd benchmarks
asv run
```
The first time this is run it creates a machine file to store information about your machine. Then a virtual environment is created and each benchmark is run multiple times to obtain a statistically valid benchmark time.
To list the benchmark timings stored for the `main` branch use:
```
asv show main
```
ASV ships with its own simple webserver to interactively display the results in a webbrowser. To use this:
```
asv publish
asv preview
```
and then open a web browser at the URL specified.
If you want to quickly run all benchmarks once only to check for errors, etc, use:
```
asv dev
```
instead of `asv run`.
Configuration
-------------
ASV configuration information is stored in `benchmarks/asv.conf.json`. This includes a list of branches to benchmark. If you are using a feature branch and wish to benchmark the code in that branch rather than `main`, edit `asv.conf.json` to change the line
```
"branches": ["main"],
```
contourpy-1.2.1/benchmarks/asv.conf.json 0000664 0000000 0000000 00000015207 14603035227 0020311 0 ustar 00root root 0000000 0000000 {
// The version of the config file format. Do not change, unless
// you know what you are doing.
"version": 1,
// The name of the project being benchmarked
"project": "contourpy",
// The project's homepage
"project_url": "http://github.com/contourpy/contourpy",
// The URL or local path of the source code repository for the
// project being benchmarked
"repo": "..",
// The Python project's subdirectory in your repo. If missing or
// the empty string, the project is assumed to be located at the root
// of the repository.
// "repo_subdir": "",
// Customizable commands for building, installing, and
// uninstalling the project. See asv.conf.json documentation.
//
// "install_command": ["in-dir={env_dir} python -mpip install {wheel_file}"],
// "uninstall_command": ["return-code=any python -mpip uninstall -y {project}"],
// "build_command": [
// "python setup.py build",
// "PIP_NO_BUILD_ISOLATION=false python -mpip wheel --no-deps --no-index -w {build_cache_dir} {build_dir}"
// ],
"build_command": [
"python -m pip install --upgrade build pip",
"python -m build --wheel -o {build_cache_dir} {build_dir}"
],
// List of branches to benchmark. If not provided, defaults to "master"
// (for git) or "default" (for mercurial).
"branches": ["main"],
// The DVCS being used. If not set, it will be automatically
// determined from "repo" by looking at the protocol in the URL
// (if remote), or by looking for special directories, such as
// ".git" (if local).
"dvcs": "git",
// The tool to use to create environments. May be "conda",
// "virtualenv" or other value depending on the plugins in use.
// If missing or the empty string, the tool will be automatically
// determined by looking for tools on the PATH environment
// variable.
"environment_type": "virtualenv",
// timeout in seconds for installing any dependencies in environment
// defaults to 10 min
//"install_timeout": 600,
// the base URL to show a commit for the project.
"show_commit_url": "http://github.com/contourpy/contourpy/commit/",
// The Pythons you'd like to test against. If not provided, defaults
// to the current version of Python used to run `asv`.
//"pythons": ["3.9"],
// The list of conda channel names to be searched for benchmark
// dependency packages in the specified order
// "conda_channels": ["conda-forge", "defaults"],
// The matrix of dependencies to test. Each key is the name of a
// package (in PyPI) and the values are version numbers. An empty
// list or empty string indicates to just test against the default
// (latest) version. null indicates that the package is to not be
// installed. If the package to be tested is only available from
// PyPI, and the 'environment_type' is conda, then you can preface
// the package name by 'pip+', and the package will be installed via
// pip (with all the conda available packages installed first,
// followed by the pip installed packages).
"matrix": {
"matplotlib": [],
"numpy": []
},
// Combinations of libraries/python versions can be excluded/included
// from the set to test. Each entry is a dictionary containing additional
// key-value pairs to include/exclude.
//
// An exclude entry excludes entries where all values match. The
// values are regexps that should match the whole string.
//
// An include entry adds an environment. Only the packages listed
// are installed. The 'python' key is required. The exclude rules
// do not apply to includes.
//
// In addition to package names, the following keys are available:
//
// - python
// Python version, as in the *pythons* variable above.
// - environment_type
// Environment type, as above.
// - sys_platform
// Platform, as in sys.platform. Possible values for the common
// cases: 'linux2', 'win32', 'cygwin', 'darwin'.
//
// "exclude": [
// {"python": "3.2", "sys_platform": "win32"}, // skip py3.2 on windows
// {"environment_type": "conda", "six": null}, // don't run without six on conda
// ],
//
// "include": [
// // additional env for python2.7
// {"python": "2.7", "numpy": "1.8"},
// // additional env if run on windows+conda
// {"platform": "win32", "environment_type": "conda", "python": "2.7", "libpython": ""},
// ],
// The directory (relative to the current directory) that benchmarks are
// stored in. If not provided, defaults to "benchmarks"
"benchmark_dir": "benchmarks",
// The directory (relative to the current directory) to cache the Python
// environments in. If not provided, defaults to "env"
"env_dir": ".asv/env",
// The directory (relative to the current directory) that raw benchmark
// results are stored in. If not provided, defaults to "results".
"results_dir": ".asv/results",
// The directory (relative to the current directory) that the html tree
// should be written to. If not provided, defaults to "html".
"html_dir": ".asv/html",
// The number of characters to retain in the commit hashes.
// "hash_length": 8,
// `asv` will cache results of the recent builds in each
// environment, making them faster to install next time. This is
// the number of builds to keep, per environment.
// "build_cache_size": 2,
// The commits after which the regression search in `asv publish`
// should start looking for regressions. Dictionary whose keys are
// regexps matching to benchmark names, and values corresponding to
// the commit (exclusive) after which to start looking for
// regressions. The default is to start from the first commit
// with results. If the commit is `null`, regression detection is
// skipped for the matching benchmark.
//
// "regressions_first_commits": {
// "some_benchmark": "352cdf", // Consider regressions only after this commit
// "another_benchmark": null, // Skip regression detection altogether
// },
// The thresholds for relative change in results, after which `asv
// publish` starts reporting regressions. Dictionary of the same
// form as in ``regressions_first_commits``, with values
// indicating the thresholds. If multiple entries match, the
// maximum is taken. If no entry matches, the default is 5%.
//
// "regressions_thresholds": {
// "some_benchmark": 0.01, // Threshold of 1%
// "another_benchmark": 0.5, // Threshold of 50%
// },
}
contourpy-1.2.1/benchmarks/benchmarks/ 0000775 0000000 0000000 00000000000 14603035227 0020011 5 ustar 00root root 0000000 0000000 contourpy-1.2.1/benchmarks/benchmarks/__init__.py 0000664 0000000 0000000 00000000000 14603035227 0022110 0 ustar 00root root 0000000 0000000 contourpy-1.2.1/benchmarks/benchmarks/bench_base.py 0000664 0000000 0000000 00000001717 14603035227 0022442 0 ustar 00root root 0000000 0000000 from __future__ import annotations
from typing import TYPE_CHECKING, Any
import numpy as np
from contourpy.util.data import random, simple
if TYPE_CHECKING:
import numpy.typing as npt
class BenchBase:
    """Shared base for contourpy ASV benchmarks.

    Subclasses call set_xyz_and_levels() from their setup() so that the grid
    and contour levels are created outside of the timed section.
    """

    levels: npt.NDArray[np.float64]
    timeout: int = 120  # Some rendering benchmarks can take more than the default minute.
    x: npt.NDArray[np.float64]
    y: npt.NDArray[np.float64]
    z: npt.NDArray[np.float64] | np.ma.MaskedArray[Any, Any]

    def set_xyz_and_levels(self, dataset: str, n: int, want_mask: bool) -> None:
        """Populate self.x, self.y, self.z and self.levels for a named dataset.

        dataset is "random" or "simple"; n is the grid size in each dimension;
        want_mask selects whether z carries a mask (a 5% mask fraction for the
        "random" dataset).
        """
        if dataset == "random":
            mask_fraction = 0.05 if want_mask else 0.0
            self.x, self.y, self.z = random((n, n), mask_fraction=mask_fraction)
            self.levels = np.arange(0.0, 1.01, 0.1)
        elif dataset == "simple":
            self.x, self.y, self.z = simple((n, n), want_mask=want_mask)
            self.levels = np.arange(-1.0, 1.01, 0.1)
        else:
            # Name the offending dataset so benchmark failures are self-explanatory.
            raise NotImplementedError(f"Unknown dataset {dataset!r}")
contourpy-1.2.1/benchmarks/benchmarks/bench_filled_mpl20xx.py 0000664 0000000 0000000 00000002362 14603035227 0024356 0 ustar 00root root 0000000 0000000 from __future__ import annotations
from contourpy import FillType, contour_generator
from .bench_base import BenchBase
from .util_bench import corner_mask_to_bool, corner_masks, datasets, problem_sizes
class BenchFilledMpl20xx(BenchBase):
    """Time filled-contour calculation for the mpl2005 and mpl2014 algorithms."""

    # ASV parameter grid; one benchmark run per combination.
    params: tuple[list[str], list[str], list[FillType], list[str | bool], list[int]] = (
        ["mpl2005", "mpl2014"], datasets(), [FillType.OuterCode], corner_masks(), problem_sizes(),
    )
    param_names: tuple[str, ...] = ("name", "dataset", "fill_type", "corner_mask", "n")

    def setup(
        self, name: str, dataset: str, fill_type: FillType, corner_mask: str | bool, n: int,
    ) -> None:
        # Build grid and levels before timing starts.
        want_mask = corner_mask != "no mask"
        self.set_xyz_and_levels(dataset, n, want_mask)

    def time_filled_mpl20xx(
        self, name: str, dataset: str, fill_type: FillType, corner_mask: str | bool, n: int,
    ) -> None:
        if name == "mpl2005" and corner_mask is True:
            raise NotImplementedError  # Does not support corner_mask=True
        gen = contour_generator(
            self.x, self.y, self.z, name=name, fill_type=fill_type,
            corner_mask=corner_mask_to_bool(corner_mask),
        )
        for lower, upper in zip(self.levels[:-1], self.levels[1:]):
            gen.filled(lower, upper)
contourpy-1.2.1/benchmarks/benchmarks/bench_filled_mpl20xx_render.py 0000664 0000000 0000000 00000002755 14603035227 0025723 0 ustar 00root root 0000000 0000000 from __future__ import annotations
from contourpy import FillType, contour_generator
from contourpy.util.mpl_renderer import MplTestRenderer
from .bench_base import BenchBase
from .util_bench import corner_mask_to_bool, corner_masks, datasets, problem_sizes
class BenchFilledMpl20xxRender(BenchBase):
    """Time filled-contour calculation plus rendering for mpl2005/mpl2014."""

    # ASV parameter grid; one benchmark run per combination.
    params: tuple[list[str], list[str], list[FillType], list[str | bool], list[int]] = (
        ["mpl2005", "mpl2014"], datasets(), [FillType.OuterCode], corner_masks(), problem_sizes(),
    )
    param_names: tuple[str, ...] = ("name", "dataset", "fill_type", "corner_mask", "n")

    def setup(
        self, name: str, dataset: str, fill_type: FillType, corner_mask: str | bool, n: int,
    ) -> None:
        # Build grid and levels before timing starts.
        want_mask = corner_mask != "no mask"
        self.set_xyz_and_levels(dataset, n, want_mask)

    def time_filled_mpl20xx_render(
        self, name: str, dataset: str, fill_type: FillType, corner_mask: str | bool, n: int,
    ) -> None:
        if name == "mpl2005" and corner_mask is True:
            raise NotImplementedError  # Does not support corner_mask=True
        gen = contour_generator(
            self.x, self.y, self.z, name=name, fill_type=fill_type,
            corner_mask=corner_mask_to_bool(corner_mask),
        )
        renderer = MplTestRenderer()
        level_pairs = zip(self.levels[:-1], self.levels[1:])
        for i, (lower, upper) in enumerate(level_pairs):
            renderer.filled(gen.filled(lower, upper), fill_type, color=f"C{i}")
        renderer.save(f"filled_{name}_{corner_mask}_{fill_type}_{n}.png")
contourpy-1.2.1/benchmarks/benchmarks/bench_filled_serial.py 0000664 0000000 0000000 00000002147 14603035227 0024324 0 ustar 00root root 0000000 0000000 from __future__ import annotations
from contourpy import FillType, contour_generator
from .bench_base import BenchBase
from .util_bench import corner_mask_to_bool, corner_masks, datasets, fill_types, problem_sizes
class BenchFilledSerial(BenchBase):
    """Time filled-contour calculation with the "serial" algorithm."""

    # ASV parameter grid; one benchmark run per combination.
    params: tuple[list[str], list[str], list[FillType], list[str | bool], list[int]] = (
        ["serial"], datasets(), fill_types(), corner_masks(), problem_sizes(),
    )
    param_names: tuple[str, ...] = ("name", "dataset", "fill_type", "corner_mask", "n")

    def setup(
        self, name: str, dataset: str, fill_type: FillType, corner_mask: str | bool, n: int,
    ) -> None:
        # Build grid and levels before timing starts.
        want_mask = corner_mask != "no mask"
        self.set_xyz_and_levels(dataset, n, want_mask)

    def time_filled_serial(
        self, name: str, dataset: str, fill_type: FillType, corner_mask: str | bool, n: int,
    ) -> None:
        gen = contour_generator(
            self.x, self.y, self.z, name=name, fill_type=fill_type,
            corner_mask=corner_mask_to_bool(corner_mask),
        )
        for lower, upper in zip(self.levels[:-1], self.levels[1:]):
            gen.filled(lower, upper)
contourpy-1.2.1/benchmarks/benchmarks/bench_filled_serial_chunk.py 0000664 0000000 0000000 00000002430 14603035227 0025507 0 ustar 00root root 0000000 0000000 from __future__ import annotations
from contourpy import FillType, contour_generator
from .bench_base import BenchBase
from .util_bench import corner_mask_to_bool, corner_masks, datasets, fill_types, total_chunk_counts
class BenchFilledSerialChunk(BenchBase):
    """Time "serial" filled contours as a function of total_chunk_count."""

    # ASV parameter grid; fixed n=1000, varying chunk count.
    params: tuple[list[str], list[str], list[FillType], list[str | bool], list[int], list[int]] = (
        ["serial"], datasets(), fill_types(), corner_masks(), [1000], total_chunk_counts(),
    )
    param_names: tuple[str, ...] = (
        "name", "dataset", "fill_type", "corner_mask", "n", "total_chunk_count",
    )

    def setup(
        self, name: str, dataset: str, fill_type: FillType, corner_mask: str | bool, n: int,
        total_chunk_count: int,
    ) -> None:
        # Build grid and levels before timing starts.
        want_mask = corner_mask != "no mask"
        self.set_xyz_and_levels(dataset, n, want_mask)

    def time_filled_serial_chunk(
        self, name: str, dataset: str, fill_type: FillType, corner_mask: str | bool, n: int,
        total_chunk_count: int,
    ) -> None:
        gen = contour_generator(
            self.x, self.y, self.z, name=name, fill_type=fill_type,
            corner_mask=corner_mask_to_bool(corner_mask), total_chunk_count=total_chunk_count,
        )
        for lower, upper in zip(self.levels[:-1], self.levels[1:]):
            gen.filled(lower, upper)
contourpy-1.2.1/benchmarks/benchmarks/bench_filled_serial_quad_as_tri.py 0000664 0000000 0000000 00000002212 14603035227 0026670 0 ustar 00root root 0000000 0000000 from __future__ import annotations
from contourpy import FillType, contour_generator
from .bench_base import BenchBase
from .util_bench import corner_mask_to_bool, corner_masks, datasets, problem_sizes
class BenchFilledSerialQuadAsTri(BenchBase):
    """Time "serial" filled contours with quad_as_tri=True."""

    # ASV parameter grid; one benchmark run per combination.
    params: tuple[list[str], list[str], list[FillType], list[str | bool], list[int]] = (
        ["serial"], datasets(), [FillType.OuterCode], corner_masks(), problem_sizes(),
    )
    param_names: tuple[str, ...] = ("name", "dataset", "fill_type", "corner_mask", "n")

    def setup(
        self, name: str, dataset: str, fill_type: FillType, corner_mask: str | bool, n: int,
    ) -> None:
        # Build grid and levels before timing starts.
        want_mask = corner_mask != "no mask"
        self.set_xyz_and_levels(dataset, n, want_mask)

    def time_filled_serial_quad_as_tri(
        self, name: str, dataset: str, fill_type: FillType, corner_mask: str | bool, n: int,
    ) -> None:
        gen = contour_generator(
            self.x, self.y, self.z, name=name, fill_type=fill_type,
            corner_mask=corner_mask_to_bool(corner_mask), quad_as_tri=True,
        )
        for lower, upper in zip(self.levels[:-1], self.levels[1:]):
            gen.filled(lower, upper)
contourpy-1.2.1/benchmarks/benchmarks/bench_filled_serial_quad_as_tri_render.py 0000664 0000000 0000000 00000002605 14603035227 0030235 0 ustar 00root root 0000000 0000000 from __future__ import annotations
from contourpy import FillType, contour_generator
from contourpy.util.mpl_renderer import MplTestRenderer
from .bench_base import BenchBase
from .util_bench import corner_mask_to_bool, corner_masks, datasets, problem_sizes
class BenchFilledSerialQuadAsTriRender(BenchBase):
    """Time "serial" filled contours with quad_as_tri=True plus rendering."""

    # ASV parameter grid; one benchmark run per combination.
    params: tuple[list[str], list[str], list[FillType], list[str | bool], list[int]] = (
        ["serial"], datasets(), [FillType.OuterCode], corner_masks(), problem_sizes(),
    )
    param_names: tuple[str, ...] = ("name", "dataset", "fill_type", "corner_mask", "n")

    def setup(
        self, name: str, dataset: str, fill_type: FillType, corner_mask: str | bool, n: int,
    ) -> None:
        # Build grid and levels before timing starts.
        self.set_xyz_and_levels(dataset, n, corner_mask != "no mask")

    def time_filled_serial_quad_as_tri_render(
        self, name: str, dataset: str, fill_type: FillType, corner_mask: str | bool, n: int,
    ) -> None:
        cont_gen = contour_generator(
            self.x, self.y, self.z, name=name, fill_type=fill_type,
            corner_mask=corner_mask_to_bool(corner_mask), quad_as_tri=True,
        )
        renderer = MplTestRenderer()
        for i in range(len(self.levels)-1):
            filled = cont_gen.filled(self.levels[i], self.levels[i+1])
            renderer.filled(filled, fill_type, color=f"C{i}")
        # Append the quad_as_tri flag to the filename, matching the equivalent
        # lines benchmark, so this does not overwrite the file written by
        # BenchFilledSerialRender for the same parameters.
        renderer.save(f"filled_{name}_{corner_mask}_{fill_type}_{n}_True.png")
contourpy-1.2.1/benchmarks/benchmarks/bench_filled_serial_render.py 0000664 0000000 0000000 00000002542 14603035227 0025662 0 ustar 00root root 0000000 0000000 from __future__ import annotations
from contourpy import FillType, contour_generator
from contourpy.util.mpl_renderer import MplTestRenderer
from .bench_base import BenchBase
from .util_bench import corner_mask_to_bool, corner_masks, datasets, fill_types, problem_sizes
class BenchFilledSerialRender(BenchBase):
    """Time "serial" filled-contour calculation plus rendering."""

    # ASV parameter grid; one benchmark run per combination.
    params: tuple[list[str], list[str], list[FillType], list[str | bool], list[int]] = (
        ["serial"], datasets(), fill_types(), corner_masks(), problem_sizes(),
    )
    param_names: tuple[str, ...] = ("name", "dataset", "fill_type", "corner_mask", "n")

    def setup(
        self, name: str, dataset: str, fill_type: FillType, corner_mask: str | bool, n: int,
    ) -> None:
        # Build grid and levels before timing starts.
        want_mask = corner_mask != "no mask"
        self.set_xyz_and_levels(dataset, n, want_mask)

    def time_filled_serial_render(
        self, name: str, dataset: str, fill_type: FillType, corner_mask: str | bool, n: int,
    ) -> None:
        gen = contour_generator(
            self.x, self.y, self.z, name=name, fill_type=fill_type,
            corner_mask=corner_mask_to_bool(corner_mask),
        )
        renderer = MplTestRenderer()
        level_pairs = zip(self.levels[:-1], self.levels[1:])
        for i, (lower, upper) in enumerate(level_pairs):
            renderer.filled(gen.filled(lower, upper), fill_type, color=f"C{i}")
        renderer.save(f"filled_{name}_{corner_mask}_{fill_type}_{n}.png")
contourpy-1.2.1/benchmarks/benchmarks/bench_filled_threaded.py 0000664 0000000 0000000 00000002706 14603035227 0024626 0 ustar 00root root 0000000 0000000 from __future__ import annotations
from contourpy import FillType, contour_generator
from .bench_base import BenchBase
from .util_bench import (
corner_mask_to_bool,
corner_masks,
datasets,
fill_types,
problem_sizes,
thread_counts,
)
class BenchFilledThreaded(BenchBase):
    """Time "threaded" filled contours as a function of thread count."""

    # ASV parameter grid; fixed total_chunk_count=40, varying thread count.
    params: tuple[list[str], list[str], list[FillType], list[str | bool], list[int], list[int],
                  list[int]] = (
        ["threaded"], datasets(), fill_types(), corner_masks(), problem_sizes(), [40],
        thread_counts(),
    )
    param_names: tuple[str, ...] = (
        "name", "dataset", "fill_type", "corner_mask", "n", "total_chunk_count", "thread_count",
    )

    def setup(
        self, name: str, dataset: str, fill_type: FillType, corner_mask: str | bool, n: int,
        total_chunk_count: int, thread_count: int,
    ) -> None:
        # Build grid and levels before timing starts.
        want_mask = corner_mask != "no mask"
        self.set_xyz_and_levels(dataset, n, want_mask)

    def time_filled_threaded(
        self, name: str, dataset: str, fill_type: FillType, corner_mask: str | bool, n: int,
        total_chunk_count: int, thread_count: int,
    ) -> None:
        gen = contour_generator(
            self.x, self.y, self.z, name=name, fill_type=fill_type,
            corner_mask=corner_mask_to_bool(corner_mask), total_chunk_count=total_chunk_count,
            thread_count=thread_count,
        )
        for lower, upper in zip(self.levels[:-1], self.levels[1:]):
            gen.filled(lower, upper)
contourpy-1.2.1/benchmarks/benchmarks/bench_lines_mpl20xx.py 0000664 0000000 0000000 00000002325 14603035227 0024230 0 ustar 00root root 0000000 0000000 from __future__ import annotations
from contourpy import LineType, contour_generator
from .bench_base import BenchBase
from .util_bench import corner_mask_to_bool, corner_masks, datasets, problem_sizes
class BenchLinesMpl20xx(BenchBase):
    """Time contour-line calculation for the mpl2005 and mpl2014 algorithms."""

    # ASV parameter grid; one benchmark run per combination.
    params: tuple[list[str], list[str], list[LineType], list[str | bool], list[int]] = (
        ["mpl2005", "mpl2014"], datasets(), [LineType.SeparateCode], corner_masks(),
        problem_sizes(),
    )
    param_names: tuple[str, ...] = ("name", "dataset", "line_type", "corner_mask", "n")

    def setup(
        self, name: str, dataset: str, line_type: LineType, corner_mask: str | bool, n: int,
    ) -> None:
        # Build grid and levels before timing starts.
        want_mask = corner_mask != "no mask"
        self.set_xyz_and_levels(dataset, n, want_mask)

    def time_lines_mpl20xx(
        self, name: str, dataset: str, line_type: LineType, corner_mask: str | bool, n: int,
    ) -> None:
        if name == "mpl2005" and corner_mask is True:
            raise NotImplementedError  # Does not support corner_mask=True
        gen = contour_generator(
            self.x, self.y, self.z, name=name, line_type=line_type,
            corner_mask=corner_mask_to_bool(corner_mask),
        )
        for lvl in self.levels:
            gen.lines(lvl)
contourpy-1.2.1/benchmarks/benchmarks/bench_lines_mpl20xx_render.py 0000664 0000000 0000000 00000002732 14603035227 0025571 0 ustar 00root root 0000000 0000000 from __future__ import annotations
from contourpy import LineType, contour_generator
from contourpy.util.mpl_renderer import MplTestRenderer
from .bench_base import BenchBase
from .util_bench import corner_mask_to_bool, corner_masks, datasets, problem_sizes
class BenchLinesMpl20xxRender(BenchBase):
    """Time contour-line calculation plus rendering for mpl2005/mpl2014."""

    # ASV parameter grid; one benchmark run per combination.
    params: tuple[list[str], list[str], list[LineType], list[str | bool], list[int]] = (
        ["mpl2005", "mpl2014"], datasets(), [LineType.SeparateCode], corner_masks(),
        problem_sizes(),
    )
    param_names: tuple[str, ...] = ("name", "dataset", "line_type", "corner_mask", "n")

    def setup(
        self, name: str, dataset: str, line_type: LineType, corner_mask: str | bool, n: int,
    ) -> None:
        # Build grid and levels before timing starts.
        want_mask = corner_mask != "no mask"
        self.set_xyz_and_levels(dataset, n, want_mask)

    def time_lines_mpl20xx_render(
        self, name: str, dataset: str, line_type: LineType, corner_mask: str | bool, n: int,
    ) -> None:
        if name == "mpl2005" and corner_mask is True:
            raise NotImplementedError  # Does not support corner_mask=True
        gen = contour_generator(
            self.x, self.y, self.z, name=name, line_type=line_type,
            corner_mask=corner_mask_to_bool(corner_mask),
        )
        renderer = MplTestRenderer()
        for i, lvl in enumerate(self.levels):
            renderer.lines(gen.lines(lvl), line_type, color=f"C{i}")
        renderer.save(f"lines_{name}_{corner_mask}_{line_type}_{n}.png")
contourpy-1.2.1/benchmarks/benchmarks/bench_lines_serial.py 0000664 0000000 0000000 00000002077 14603035227 0024201 0 ustar 00root root 0000000 0000000 from __future__ import annotations
from contourpy import LineType, contour_generator
from .bench_base import BenchBase
from .util_bench import corner_mask_to_bool, corner_masks, datasets, line_types, problem_sizes
class BenchLinesSerial(BenchBase):
    """Time contour-line calculation with the "serial" algorithm."""

    # ASV parameter grid; one benchmark run per combination.
    params: tuple[list[str], list[str], list[LineType], list[str | bool], list[int]] = (
        ["serial"], datasets(), line_types(), corner_masks(), problem_sizes(),
    )
    param_names: tuple[str, ...] = ("name", "dataset", "line_type", "corner_mask", "n")

    def setup(
        self, name: str, dataset: str, line_type: LineType, corner_mask: str | bool, n: int,
    ) -> None:
        # Build grid and levels before timing starts.
        want_mask = corner_mask != "no mask"
        self.set_xyz_and_levels(dataset, n, want_mask)

    def time_lines_serial(
        self, name: str, dataset: str, line_type: LineType, corner_mask: str | bool, n: int,
    ) -> None:
        gen = contour_generator(
            self.x, self.y, self.z, name=name, line_type=line_type,
            corner_mask=corner_mask_to_bool(corner_mask),
        )
        for lvl in self.levels:
            gen.lines(lvl)
contourpy-1.2.1/benchmarks/benchmarks/bench_lines_serial_chunk.py 0000664 0000000 0000000 00000002360 14603035227 0025364 0 ustar 00root root 0000000 0000000 from __future__ import annotations
from contourpy import LineType, contour_generator
from .bench_base import BenchBase
from .util_bench import corner_mask_to_bool, corner_masks, datasets, line_types, total_chunk_counts
class BenchLinesSerialChunk(BenchBase):
    """Time "serial" contour lines as a function of total_chunk_count."""

    # ASV parameter grid; fixed n=1000, varying chunk count.
    params: tuple[list[str], list[str], list[LineType], list[str | bool], list[int], list[int]] = (
        ["serial"], datasets(), line_types(), corner_masks(), [1000], total_chunk_counts(),
    )
    param_names: tuple[str, ...] = (
        "name", "dataset", "line_type", "corner_mask", "n", "total_chunk_count",
    )

    def setup(
        self, name: str, dataset: str, line_type: LineType, corner_mask: str | bool, n: int,
        total_chunk_count: int,
    ) -> None:
        # Build grid and levels before timing starts.
        want_mask = corner_mask != "no mask"
        self.set_xyz_and_levels(dataset, n, want_mask)

    def time_lines_serial_chunk(
        self, name: str, dataset: str, line_type: LineType, corner_mask: str | bool, n: int,
        total_chunk_count: int,
    ) -> None:
        gen = contour_generator(
            self.x, self.y, self.z, name=name, line_type=line_type,
            corner_mask=corner_mask_to_bool(corner_mask), total_chunk_count=total_chunk_count,
        )
        for lvl in self.levels:
            gen.lines(lvl)
contourpy-1.2.1/benchmarks/benchmarks/bench_lines_serial_quad_as_tri.py 0000664 0000000 0000000 00000002145 14603035227 0026550 0 ustar 00root root 0000000 0000000 from __future__ import annotations
from contourpy import LineType, contour_generator
from .bench_base import BenchBase
from .util_bench import corner_mask_to_bool, corner_masks, datasets, problem_sizes
class BenchLinesSerialQuadAsTri(BenchBase):
    """Time "serial" contour lines with quad_as_tri=True."""

    # ASV parameter grid; one benchmark run per combination.
    params: tuple[list[str], list[str], list[LineType], list[str | bool], list[int]] = (
        ["serial"], datasets(), [LineType.SeparateCode], corner_masks(), problem_sizes(),
    )
    param_names: tuple[str, ...] = ("name", "dataset", "line_type", "corner_mask", "n")

    def setup(
        self, name: str, dataset: str, line_type: LineType, corner_mask: str | bool, n: int,
    ) -> None:
        # Build grid and levels before timing starts.
        want_mask = corner_mask != "no mask"
        self.set_xyz_and_levels(dataset, n, want_mask)

    def time_lines_serial_quad_as_tri(
        self, name: str, dataset: str, line_type: LineType, corner_mask: str | bool, n: int,
    ) -> None:
        gen = contour_generator(
            self.x, self.y, self.z, name=name, line_type=line_type,
            corner_mask=corner_mask_to_bool(corner_mask), quad_as_tri=True,
        )
        for lvl in self.levels:
            gen.lines(lvl)
contourpy-1.2.1/benchmarks/benchmarks/bench_lines_serial_quad_as_tri_render.py 0000664 0000000 0000000 00000002557 14603035227 0030116 0 ustar 00root root 0000000 0000000 from __future__ import annotations
from contourpy import LineType, contour_generator
from contourpy.util.mpl_renderer import MplTestRenderer
from .bench_base import BenchBase
from .util_bench import corner_mask_to_bool, corner_masks, datasets, problem_sizes
class BenchLinesSerialQuadAsTriRender(BenchBase):
    """Time "serial" contour lines with quad_as_tri=True plus rendering."""

    # ASV parameter grid; one benchmark run per combination.
    params: tuple[list[str], list[str], list[LineType], list[str | bool], list[int]] = (
        ["serial"], datasets(), [LineType.SeparateCode], corner_masks(), problem_sizes(),
    )
    param_names: tuple[str, ...] = ("name", "dataset", "line_type", "corner_mask", "n")

    def setup(
        self, name: str, dataset: str, line_type: LineType, corner_mask: str | bool, n: int,
    ) -> None:
        # Build grid and levels before timing starts.
        want_mask = corner_mask != "no mask"
        self.set_xyz_and_levels(dataset, n, want_mask)

    def time_lines_serial_quad_as_tri_render(
        self, name: str, dataset: str, line_type: LineType, corner_mask: str | bool, n: int,
    ) -> None:
        gen = contour_generator(
            self.x, self.y, self.z, name=name, line_type=line_type,
            corner_mask=corner_mask_to_bool(corner_mask), quad_as_tri=True,
        )
        renderer = MplTestRenderer()
        for i, lvl in enumerate(self.levels):
            renderer.lines(gen.lines(lvl), line_type, color=f"C{i}")
        renderer.save(f"lines_{name}_{corner_mask}_{line_type}_{n}_True.png")
contourpy-1.2.1/benchmarks/benchmarks/bench_lines_serial_render.py 0000664 0000000 0000000 00000002504 14603035227 0025533 0 ustar 00root root 0000000 0000000 from __future__ import annotations
from contourpy import LineType, contour_generator
from contourpy.util.mpl_renderer import MplTestRenderer
from .bench_base import BenchBase
from .util_bench import corner_mask_to_bool, corner_masks, datasets, line_types, problem_sizes
class BenchLinesSerialRender(BenchBase):
    """Time "serial" contour-line calculation plus rendering."""

    # ASV parameter grid; one benchmark run per combination.
    params: tuple[list[str], list[str], list[LineType], list[str | bool], list[int]] = (
        ["serial"], datasets(), line_types(), corner_masks(), problem_sizes(),
    )
    param_names: tuple[str, ...] = ("name", "dataset", "line_type", "corner_mask", "n")

    def setup(
        self, name: str, dataset: str, line_type: LineType, corner_mask: str | bool, n: int,
    ) -> None:
        # Build grid and levels before timing starts.
        want_mask = corner_mask != "no mask"
        self.set_xyz_and_levels(dataset, n, want_mask)

    def time_lines_serial_render(
        self, name: str, dataset: str, line_type: LineType, corner_mask: str | bool, n: int,
    ) -> None:
        gen = contour_generator(
            self.x, self.y, self.z, name=name, line_type=line_type,
            corner_mask=corner_mask_to_bool(corner_mask),
        )
        renderer = MplTestRenderer()
        for i, lvl in enumerate(self.levels):
            renderer.lines(gen.lines(lvl), line_type, color=f"C{i}")
        renderer.save(f"lines_{name}_{corner_mask}_{line_type}_{n}.png")
contourpy-1.2.1/benchmarks/benchmarks/bench_lines_threaded.py 0000664 0000000 0000000 00000002636 14603035227 0024503 0 ustar 00root root 0000000 0000000 from __future__ import annotations
from contourpy import LineType, contour_generator
from .bench_base import BenchBase
from .util_bench import (
corner_mask_to_bool,
corner_masks,
datasets,
line_types,
problem_sizes,
thread_counts,
)
class BenchLinesThreaded(BenchBase):
    """Time "threaded" contour lines as a function of thread count."""

    # ASV parameter grid; fixed total_chunk_count=40, varying thread count.
    params: tuple[list[str], list[str], list[LineType], list[str | bool], list[int], list[int],
                  list[int]] = (
        ["threaded"], datasets(), line_types(), corner_masks(), problem_sizes(), [40],
        thread_counts(),
    )
    param_names: tuple[str, ...] = (
        "name", "dataset", "line_type", "corner_mask", "n", "total_chunk_count", "thread_count",
    )

    def setup(
        self, name: str, dataset: str, line_type: LineType, corner_mask: str | bool, n: int,
        total_chunk_count: int, thread_count: int,
    ) -> None:
        # Build grid and levels before timing starts.
        want_mask = corner_mask != "no mask"
        self.set_xyz_and_levels(dataset, n, want_mask)

    def time_lines_threaded(
        self, name: str, dataset: str, line_type: LineType, corner_mask: str | bool, n: int,
        total_chunk_count: int, thread_count: int,
    ) -> None:
        gen = contour_generator(
            self.x, self.y, self.z, name=name, line_type=line_type,
            corner_mask=corner_mask_to_bool(corner_mask), total_chunk_count=total_chunk_count,
            thread_count=thread_count,
        )
        for lvl in self.levels:
            gen.lines(lvl)
contourpy-1.2.1/benchmarks/benchmarks/util_bench.py 0000664 0000000 0000000 00000001517 14603035227 0022503 0 ustar 00root root 0000000 0000000 from __future__ import annotations
from contourpy import FillType, LineType, max_threads
def corner_mask_to_bool(corner_mask: str | bool) -> bool:
    """Map a benchmark corner_mask parameter to a boolean.

    Booleans pass through unchanged; the "no mask" string (or any other
    non-bool value) maps to False.
    """
    return corner_mask if isinstance(corner_mask, bool) else False
def corner_masks() -> list[str | bool]:
    """Corner-mask parameter values benchmarked: unmasked data, then masked
    data with corner_mask disabled and enabled."""
    return ["no mask", False, True]
def datasets() -> list[str]:
    """Names of the datasets benchmarked (see BenchBase.set_xyz_and_levels)."""
    return ["simple", "random"]
def fill_types() -> list[FillType]:
    """All FillType enum members, in declaration order."""
    return [*FillType.__members__.values()]
def line_types() -> list[LineType]:
    """All LineType enum members, in declaration order."""
    return [*LineType.__members__.values()]
def problem_sizes() -> list[int]:
    """Grid sizes n benchmarked (grids are n by n points)."""
    return [10, 30, 100, 300, 1000]
def thread_counts() -> list[int]:
    """Candidate thread counts, limited to what this contourpy build reports
    via max_threads() (treated as at least 1)."""
    limit = max(max_threads(), 1)
    return [count for count in (1, 2, 4, 6, 8) if count <= limit]
def total_chunk_counts() -> list[int]:
    """Total chunk counts benchmarked for the chunked benchmarks."""
    return [4, 12, 40, 120]
contourpy-1.2.1/benchmarks/loader.py 0000664 0000000 0000000 00000006633 14603035227 0017524 0 ustar 00root root 0000000 0000000 from __future__ import annotations
from copy import deepcopy
from datetime import datetime
from typing import TYPE_CHECKING, Any
from asv.benchmarks import Benchmarks
from asv.config import Config
from asv.results import Results, iter_results_for_machine
from asv.statistics import get_err
from contourpy import FillType, LineType
if TYPE_CHECKING:
from asv.benchmark import Benchmark
class Loader:
_config: Config
_benchmarks: Benchmarks
_machine: str
_results: Results
def __init__(self, machine: str | None = None) -> None:
self._config = Config.load()
self._benchmarks = Benchmarks.load(self._config)
if machine is None:
import platform
machine = platform.uname()[1]
latest_results = None
for results in iter_results_for_machine(self._config.results_dir, machine):
if latest_results is None or results.date > latest_results.date:
latest_results = results
if latest_results is None:
raise RuntimeError(f"No results found for machine {machine}")
print(latest_results.commit_hash, datetime.fromtimestamp(latest_results.date/1000.0))
self._results = latest_results
self._machine = machine
def _find_benchmark_by_name(self, name: str) -> Benchmark:
for k, v in self._benchmarks.items():
if k.endswith(name):
return v
raise RuntimeError(f"Cannot find benchmark with name {name}")
@property
def commit(self) -> str:
return self._results.commit_hash # type: ignore[no-any-return]
def get(self, benchmark_name: str, **kwargs: Any) -> dict[str, Any]:
benchmark = self._find_benchmark_by_name(benchmark_name)
param_names = benchmark["param_names"]
params = deepcopy(benchmark["params"])
for name, value in kwargs.items():
index = param_names.index(name)
if isinstance(value, list):
params[index] = [repr(item) for item in value]
else:
params[index] = [repr(value)]
stats = self._results.get_result_stats(benchmark["name"], params)
values = self._results.get_result_value(benchmark["name"], params)
ret = {}
for name, param in zip(param_names, params):
for i, item in enumerate(param):
if isinstance(item, str):
if item[0] == "'" and item[-1] == "'":
item = item[1:-1]
if item.startswith(" str:
return self._machine
contourpy-1.2.1/benchmarks/plot_benchmarks.py 0000664 0000000 0000000 00000031530 14603035227 0021423 0 ustar 00root root 0000000 0000000 from __future__ import annotations
import re
from typing import TYPE_CHECKING, Any
from asv.util import human_value
from loader import Loader
import matplotlib.pyplot as plt
import numpy as np
from contourpy import FillType, LineType
if TYPE_CHECKING:
from matplotlib.axes import Axes
from matplotlib.patches import Rectangle
# Default fill/line types that exist in all algorithms.
# (mpl2005/mpl2014 only support these; "serial"/"threaded" support more.)
default_fill_type = FillType.OuterCode
default_line_type = LineType.SeparateCode
def capital_letters_to_newlines(text: str) -> str:
    """Insert a newline wherever a lowercase letter or digit is immediately
    followed by an uppercase letter, e.g. "OuterCode" -> "Outer\\nCode"."""
    boundary = re.compile(r"([a-z0-9])([A-Z])")
    return boundary.sub(r"\1\n\2", text)
def get_corner_mask_label(corner_mask: bool | str) -> str:
    """Legend label for a corner_mask benchmark parameter value."""
    if corner_mask == "no mask":
        return "no mask"
    return f"corner_mask={corner_mask}"
def get_style(name: str, corner_mask: bool | str) -> tuple[str, str, str, float]:
    """Bar style (face color, edge color, hatch, line width) for an algorithm
    name and corner_mask value.

    Colors from Paul Tol's colorblind friendly light scheme
    (https://personal.sron.nl/~pault).
    """
    face_colors = {
        "mpl2005": "#eedd88",  # light yellow.
        "mpl2014": "#ee8866",  # orange.
        "serial": "#77aadd",  # light blue.
        "threaded": "#99ddff",  # light cyan.
    }
    hatch_patterns = {
        "no mask": "",
        False: "---",
        True: "///",
    }
    return face_colors[name], "#222222", hatch_patterns[corner_mask], 0.5
def with_time_units(value: float, error: float | None = None) -> str:
    """Format a time in seconds via ASV's human_value(), inserting the space
    between number and units that human_value() omits.
    See e.g. https://physics.nist.gov/cuu/Units/checklist.html
    """
    formatted = human_value(value, "seconds", error)
    return re.sub(r"(?<=\S)([a-zA-Z]+)$", r" \1", formatted)
def by_name_and_type(loader: Loader, filled: bool, dataset: str, render: bool, n: int) -> None:
    """Plot grouped bar charts comparing the contouring algorithms.

    Draws one figure per color mode ("light" and "dark") with a bar per
    (algorithm, corner mask, fill/line type) combination, saves each figure
    as an SVG, then prints speedup ratios to stdout.

    Args:
        loader: Benchmark results loader; loader.get() returns a dict with at
            least the keys "name", "mean", "error" and "fill_type"/"line_type".
        filled: True for filled contours (FillType), False for lines (LineType).
        dataset: Dataset name forwarded to the loader, e.g. "simple" or "random".
        render: Whether to use the benchmarks that include rendering time.
        n: Problem size of the benchmark run (appears in title and filename).
    """
    show_error = False
    corner_masks: list[str | bool] = ["no mask", False, True]
    filled_str = "filled" if filled else "lines"
    title = f"{filled_str} {dataset} n={n} {'(calculate and render)' if render else ''}"
    nbars = 3
    width = 1.0 / (nbars + 1)
    ntypes = len(FillType.__members__) if filled else len(LineType.__members__)
    cache = {}  # Results cache, keyed by (name, fill/line type, corner_mask).
    for mode in ["light", "dark"]:
        plt.style.use("default" if mode == "light" else "dark_background")
        fig, ax = plt.subplots(figsize=(8.5, 6))
        xticklabels = []
        for name in ["mpl2005", "mpl2014", "serial"]:
            # Both mpl algorithms share one benchmark module, "mpl20xx".
            bname = "serial" if name == "serial" else "mpl20xx"
            benchmarks_name = f"time_{filled_str}_{bname}{'_render' if render else ''}"
            if name == "serial":
                # serial has one bar group per fill/line type, at x = 2..ntypes+1.
                xs = 2 + np.arange(ntypes)
            else:
                # mpl2005 and mpl2014 each occupy a single x position (0 or 1).
                xs = np.array(0 if name == "mpl2005" else 1)
            i = 0  # Index of the corner-mask bar within the group.
            for corner_mask in corner_masks:
                kwargs = {"name": name, "dataset": dataset, "corner_mask": corner_mask, "n": n}
                results = loader.get(benchmarks_name, **kwargs)
                if results["name"] != name:
                    raise RuntimeError(f"Loader returning wrong name: {name} != {results['name']}")
                if results["mean"] is None:
                    # No results for this combination (e.g. an unsupported
                    # corner-mask setting); skip it.
                    # NOTE(review): the skip also bypasses "i += 1", so later
                    # bars shift into the vacated offset -- confirm intended.
                    continue
                name = results["name"]
                mean = results["mean"]
                error = results["error"]
                types = results["fill_type" if filled else "line_type"]
                if not isinstance(types, list):
                    types = [types]
                if mode == "light":
                    # Populate the cache once only; dark mode reuses the
                    # identical numbers.
                    for m, t in zip(mean, types):
                        cache[(name, t, corner_mask)] = m
                color, edge_color, hatch, line_width = get_style(name, corner_mask)
                # Center the nbars bars of this group around the group's x.
                offset = width*(i - 0.5*(nbars - 1))
                label = f"{name} {get_corner_mask_label(corner_mask)}"
                yerr = error if show_error else None
                mean = np.asarray(mean, dtype=np.float64)  # None -> nan.
                if corner_mask == "no mask":
                    # Tick labels are collected once per name (first mask only).
                    xticklabels += [name + str(t).split(".")[1] for t in types]
                rects = ax.bar(
                    xs + offset, mean, width, yerr=yerr, color=color, edgecolor=edge_color,
                    hatch=hatch, linewidth=line_width, capsize=4, label=label, zorder=3)
                if show_error:
                    labels = [with_time_units(m, s) for m, s in zip(mean, error)]
                else:
                    labels = [with_time_units(m) for m in mean]
                ax.bar_label(rects, labels, padding=5, rotation="vertical", size="medium")
                i += 1
        # Fixed y-limits for specific plots so light/dark pairs match;
        # otherwise add 10% headroom for the bar labels.
        if filled and not render:
            if dataset == "random":
                ax.set_ylim(0, 2.65)
            else:
                ax.set_ylim(0, 0.3)
        elif filled and render and dataset == "simple":
            ax.set_ylim(0, 0.425)
        elif not filled and render and dataset == "simple":
            ax.set_ylim(0, 0.4)
        else:
            ax.set_ylim(0, ax.get_ylim()[1]*1.1)  # Magic number.
        # Hand-tuned legend placement for plots where "best" overlaps bars.
        loc: str | tuple[float, float] = "best"
        if not filled and render and dataset == "random":
            loc = "lower left"
        elif render and dataset == "simple":
            loc = "lower right"
        elif filled and render and dataset == "random":
            loc = (0.51, 0.6)
        ax.legend(loc=loc, framealpha=0.9)
        ax.grid(axis="y", c="k" if mode == "light" else "w", alpha=0.2)
        ax.set_xticks(np.arange(ntypes+2))
        xticklabels = list(map(capital_letters_to_newlines, xticklabels))
        ax.set_xticklabels(xticklabels)
        ax.set_ylabel("Time (seconds)")
        ax.set_title(title)
        # Draw spines above the grid and bars.
        for spine in ax.spines.values():
            spine.set_zorder(5)
        fig.tight_layout()
        filename = f"{filled_str}_{dataset}_{n}{'_render' if render else ''}_{mode}.svg"
        # print(f"Saving {filename}")
        fig.savefig(filename, transparent=True)
    # Print comparison of different algorithms using mpl default type.
    print(f"Times and speedups: {filled_str} dataset={dataset} render={render}")
    default_type = FillType.OuterCode if filled else LineType.SeparateCode
    for target in ["mpl2005", "mpl2014"]:
        names = ["serial", target]
        for m in ("no mask", False, True):
            if names[1] == "mpl2005" and m is True:
                # mpl2005 has no corner_mask=True results to compare against.
                continue
            times = [cache[(name, default_type, m)] for name in names]
            ratio = times[0]/times[1]
            print(f" {ratio:.3f}, {1.0/ratio:.3f}, {names[0]}:{names[1]}, {default_type}, {m}")
    print()
    # Print comparison of different line/fill types for serial algorithm.
    name = "serial"
    for t in (FillType.__members__.values() if filled else LineType.__members__.values()):
        if t == default_type:
            continue
        for m in ("no mask", False, True):
            times = [cache[(name, t, m)], cache[(name, default_type, m)]]
            ratio = times[0]/times[1]
            print(f" {ratio:.3f}, {1.0/ratio:.3f}, {name}, {t}:{default_type}, {m}")
    print()
def comparison_two_benchmarks(
    loader: Loader, filled: bool, dataset: str, varying: str, varying_values: list[float],
) -> None:
    """Plot a baseline serial run next to runs that vary one parameter.

    For varying == "thread_count", compares chunked serial (fixed at 40
    chunks) against the threaded algorithm at each thread count; for
    varying == "total_chunk_count", compares unchunked serial against
    chunked serial at each chunk count. Saves one SVG per color mode and,
    for chunk comparisons, prints speedup ranges to stdout.

    Args:
        loader: Benchmark results loader (same contract as by_name_and_type).
        filled: True for filled contours, False for contour lines.
        dataset: Dataset name forwarded to the loader.
        varying: "thread_count" or "total_chunk_count".
        varying_values: Values of the varying parameter, one bar each.

    Raises:
        RuntimeError: If varying is not one of the two supported fields.
    """
    if varying == "thread_count":
        file_prefix = "threaded"
    elif varying == "total_chunk_count":
        file_prefix = "chunk"
    else:
        raise RuntimeError(f"Invalid varying field '{varying}'")
    show_error = False
    show_speedups = (varying == "thread_count")
    n = 1000
    corner_mask = "no mask"
    filled_str = "filled" if filled else "lines"
    kwargs: dict[str, Any] = {"dataset":dataset, "corner_mask": corner_mask, "n": n}
    if varying == "thread_count":
        # Baseline for threading is chunked serial at a fixed chunk count.
        kwargs["total_chunk_count"] = 40
    name0 = "serial"  # Baseline algorithm.
    name1 = "threaded" if varying == "thread_count" else "serial"
    # First query: the baseline (one result per fill/line type).
    kwargs["name"] = name0
    if varying == "thread_count":
        benchmarks_name = f"time_{filled_str}_{name0}_chunk"
    else:
        benchmarks_name = f"time_{filled_str}_{name0}"
    results = loader.get(benchmarks_name, **kwargs)
    fill_or_line_type = results["fill_type"] if filled else results["line_type"]
    ntype = len(fill_or_line_type)
    mean0 = results["mean"]
    error0 = results["error"]
    # Second query: the varying runs (one result per type per varying value).
    kwargs["name"] = name1
    kwargs[varying] = varying_values
    if varying == "thread_count":
        benchmarks_name = f"time_{filled_str}_{name1}"
    else:
        benchmarks_name = f"time_{filled_str}_{name1}_chunk"
    results = loader.get(benchmarks_name, **kwargs)
    mean1 = results["mean"]
    error1 = results["error"]
    varying_count = len(varying_values)
    # One x slot per bar plus a gap slot between type groups.
    xs = np.arange(ntype*(varying_count+2))
    xs.shape = (ntype, varying_count+2)
    # Speedup of each varying run relative to its type's baseline.
    speedups = np.expand_dims(mean0, axis=1) / np.reshape(mean1, (ntype, varying_count))
    speedups_flat = speedups.ravel()
    def in_bar_label(ax: Axes, rect: Rectangle, value: str) -> None:
        # Annotate the varying value inside the bottom of a bar.
        kwargs: dict[str, Any] = {"fontsize": "medium", "ha": "center", "va": "bottom",
                                  "color": "k"}
        if varying != "thread_count":
            kwargs["rotation"] = "vertical"
        ax.annotate(value, (rect.xy[0] + 0.5*rect.get_width(), rect.xy[1]), **kwargs)
    for mode in ["light", "dark"]:
        plt.style.use("default" if mode == "light" else "dark_background")
        fig, ax = plt.subplots(figsize=(8.5, 6))
        # Baseline (serial) bars, one per fill/line type.
        color, edge_color, hatch, line_width = get_style(name0, corner_mask)
        if varying == "thread_count":
            label = f"{name0} {get_corner_mask_label(corner_mask)}"
        else:
            label = None
        rects = ax.bar(xs[:, 0], mean0, width=1, color=color, edgecolor=edge_color, hatch=hatch,
                       linewidth=line_width, label=label, zorder=3)
        if show_error:
            labels = [with_time_units(m, s) for m, s in zip(mean0, error0)]
        else:
            labels = [with_time_units(m) for m in mean0]
        ax.bar_label(rects, labels, padding=5, rotation="vertical", size="medium")
        if varying != "thread_count":
            # Baseline is a single (unchunked) run; mark it as chunk count 1.
            for rect in rects:
                in_bar_label(ax, rect, " 1")
        # Varying (threaded or chunked) bars.
        color, edge_color, hatch, line_width = get_style(name1, corner_mask)
        label = varying.replace("_", " ")
        label = f"{name1} {get_corner_mask_label(corner_mask)}\n({label} shown at bottom of bar)"
        rects = ax.bar(xs[:, 1:-1].ravel(), mean1, width=1, color=color, edgecolor=edge_color,
                       hatch=hatch, linewidth=line_width, label=label, zorder=3)
        labels = []
        for i, (mean, error, speedup) in enumerate(zip(mean1, error1, speedups_flat)):
            if show_error:
                label = with_time_units(mean, error)
            else:
                label = with_time_units(mean)
            if show_speedups and i % varying_count > 0:
                # Skip the first bar of each group (speedup is vs itself).
                label += f" (x {speedup:.2f})"
            labels.append(label)
        ax.bar_label(rects, labels, padding=5, rotation="vertical", size="medium")
        for rect, value in zip(rects, np.tile(varying_values, ntype)):
            in_bar_label(ax, rect, f" {value}")
        # Fixed headroom for "random"; otherwise scale the autoscaled limit.
        if dataset == "random":
            ymax = 2.0 if filled else 1.43
        elif varying == "thread_count":
            ymax = ax.get_ylim()[1]*1.32
        else:
            ymax = ax.get_ylim()[1]*1.25
        ax.set_ylim(0, ymax)
        ax.set_xticks(xs[:, 0] + 0.5*varying_count)
        xticklabels = [str(t).split(".")[1] for t in fill_or_line_type]
        xticklabels = list(map(capital_letters_to_newlines, xticklabels))
        ax.set_xticklabels(xticklabels)
        ax.legend(loc="upper right", framealpha=0.9)
        ax.grid(axis="y", c="k" if mode == "light" else "w", alpha=0.2)
        ax.set_ylabel("Time (seconds)")
        ax.set_title(f"{filled_str} {dataset} n={n}")
        fig.tight_layout()
        filename = f"{file_prefix}_{filled_str}_{dataset}_{mode}.svg"
        # print(f"Saving {filename}")
        fig.savefig(filename, transparent=True)
    if varying == "total_chunk_count":
        # Print comparison of different algorithms using mpl default type.
        print(f"Times and speedups: varying={varying} {filled_str} dataset={dataset}")
        for i, t in enumerate(fill_or_line_type):
            min_, max_ = speedups[i].min(), speedups[i].max()
            print(f" {1.0/max_:.3f}-{1.0/min_:.3f}, {min_:.3f}-{max_:.3f}, {t}")
        print()
def main() -> None:
    """Entry point: load benchmark results and generate every plot/report."""
    loader = Loader()
    print(f"Saving benchmark plots for machine={loader.machine} commit={loader.commit[:7]}")

    flags = (False, True)
    datasets = ("simple", "random")

    # Per-algorithm bar charts: calculate-only first, then calculate+render.
    for render in flags:
        for filled in flags:
            for dataset in datasets:
                by_name_and_type(loader, filled, dataset, render, 1000)

    # Unchunked vs chunked serial comparison.
    for filled in flags:
        for dataset in datasets:
            comparison_two_benchmarks(
                loader, filled, dataset, "total_chunk_count", [4, 12, 40, 120])

    # Chunked serial vs threaded comparison.
    for filled in flags:
        for dataset in datasets:
            comparison_two_benchmarks(
                loader, filled, dataset, "thread_count", [1, 2, 4, 6])
# Run the plot generation only when executed as a script, not on import.
if __name__ == "__main__":
    main()
contourpy-1.2.1/build_requirements.txt 0000664 0000000 0000000 00000000206 14603035227 0020220 0 ustar 00root root 0000000 0000000 # These are the requirements from the build-system section of pyproject.toml
meson >= 1.2.0
meson-python >= 0.13.1
pybind11 >= 2.12.0
contourpy-1.2.1/docs/ 0000775 0000000 0000000 00000000000 14603035227 0014507 5 ustar 00root root 0000000 0000000 contourpy-1.2.1/docs/Makefile 0000664 0000000 0000000 00000001172 14603035227 0016150 0 ustar 00root root 0000000 0000000 # Minimal makefile for Sphinx documentation
#
# You can set these variables from the command line, and also
# from the environment for the first two.
SPHINXOPTS ?=
SPHINXBUILD ?= sphinx-build
SOURCEDIR = .
BUILDDIR = _build
# Put it first so that "make" without argument is like "make help".
help:
@$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
.PHONY: help Makefile
# Catch-all target: route all unknown targets to Sphinx using the new
# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
%: Makefile
@$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
contourpy-1.2.1/docs/_static/ 0000775 0000000 0000000 00000000000 14603035227 0016135 5 ustar 00root root 0000000 0000000 contourpy-1.2.1/docs/_static/chunk_filled_random_dark.svg 0000664 0000000 0000000 00000207326 14603035227 0023660 0 ustar 00root root 0000000 0000000
contourpy-1.2.1/docs/_static/chunk_filled_random_light.svg 0000664 0000000 0000000 00000204306 14603035227 0024041 0 ustar 00root root 0000000 0000000
contourpy-1.2.1/docs/_static/chunk_filled_simple_dark.svg 0000664 0000000 0000000 00000207061 14603035227 0023665 0 ustar 00root root 0000000 0000000
contourpy-1.2.1/docs/_static/chunk_filled_simple_light.svg 0000664 0000000 0000000 00000204041 14603035227 0024046 0 ustar 00root root 0000000 0000000
contourpy-1.2.1/docs/_static/chunk_lines_random_dark.svg 0000664 0000000 0000000 00000170604 14603035227 0023531 0 ustar 00root root 0000000 0000000
contourpy-1.2.1/docs/_static/chunk_lines_random_light.svg 0000664 0000000 0000000 00000166232 14603035227 0023721 0 ustar 00root root 0000000 0000000
contourpy-1.2.1/docs/_static/chunk_lines_simple_dark.svg 0000664 0000000 0000000 00000171517 14603035227 0023546 0 ustar 00root root 0000000 0000000
contourpy-1.2.1/docs/_static/chunk_lines_simple_light.svg 0000664 0000000 0000000 00000167100 14603035227 0023725 0 ustar 00root root 0000000 0000000
contourpy-1.2.1/docs/_static/config_filled_corner_dark.png 0000664 0000000 0000000 00000415473 14603035227 0024016 0 ustar 00root root 0000000 0000000 PNG
IHDR |؞= 9tEXtSoftware Matplotlib version3.6.2, https://matplotlib.org/(b pHYs a a?i IDATxy|T9gL@P*h"֪UԥUĥ^ڪՊVE"hd
*քd̜c!d29|ܴu@5mnYYY,X
6zjFǎ/{<쳬ZsJ1I}Y{Æ
k{fٳ+V4{g?6l`ӦM+lYشi6m|b(F5v"={6*F駟fyA_oPL>A1rH&LrK,K.a߾}s7aܹC
Fbܸq1gΜYPP;v,]t_=g}S-.ضmo8> W5[o1~xƏϯ~"i{̛7o>dzk.3V'.T`uٴi#G5/BX'?q\;cr1nVW-"ʕ+>}:kh3gD49ƎX.cƌaܸq̙3^z)y#ýˤI>|8]v?yTؖZWZ/=ӹ+;v,?),,lb(Z߿ӧcNDHjȑ#w}o^"Z.s9'O_~ =Sr2f?|&LJ$uk