pax_global_header00006660000000000000000000000064147542576030014527gustar00rootroot0000000000000052 comment=0aa1b386790d9a0b6289a098bc6acd0d1f222984 pyproj-3.7.1/000077500000000000000000000000001475425760300130625ustar00rootroot00000000000000pyproj-3.7.1/.all-contributorsrc000066400000000000000000000430221475425760300167140ustar00rootroot00000000000000{ "projectName": "pyproj", "projectOwner": "pyproj4", "repoType": "github", "repoHost": "https://github.com", "files": [ "README.md" ], "imageSize": 100, "commit": true, "commitConvention": "none", "contributors": [ { "login": "jswhit", "name": "Jeff Whitaker", "avatar_url": "https://avatars2.githubusercontent.com/u/579593?v=4", "profile": "https://github.com/jswhit", "contributions": [ "doc", "test", "code", "example", "ideas", "review", "question", "maintenance", "infra", "bug" ] }, { "login": "snowman2", "name": "Alan D. Snow", "avatar_url": "https://avatars3.githubusercontent.com/u/8699967?v=4", "profile": "https://github.com/snowman2", "contributions": [ "doc", "test", "code", "example", "maintenance", "infra", "ideas", "review", "question", "bug" ] }, { "login": "micahcochran", "name": "Micah Cochran", "avatar_url": "https://avatars0.githubusercontent.com/u/7433104?v=4", "profile": "https://github.com/micahcochran", "contributions": [ "doc", "test", "code", "maintenance", "infra", "review", "question", "bug" ] }, { "login": "jorisvandenbossche", "name": "Joris Van den Bossche", "avatar_url": "https://avatars2.githubusercontent.com/u/1020496?v=4", "profile": "https://jorisvandenbossche.github.io/", "contributions": [ "doc", "code", "ideas", "review", "question", "bug", "test" ] }, { "login": "cjmayo", "name": "Chris Mayo", "avatar_url": "https://avatars1.githubusercontent.com/u/921089?v=4", "profile": "https://github.com/cjmayo", "contributions": [ "test" ] }, { "login": "cffk", "name": "Charles Karney", "avatar_url": "https://avatars1.githubusercontent.com/u/2298266?v=4", "profile": "https://www.petrel.org", "contributions": [ "code", "test" ] }, { "login": "zippy1981", "name": "Justin Dearing", "avatar_url": "https://avatars3.githubusercontent.com/u/146930?v=4", "profile": "http://www.justaprogrammer.net/profile/justin", "contributions": [ "infra" ] }, { "login": "jdkloe", "name": "Jos de Kloe", "avatar_url": "https://avatars3.githubusercontent.com/u/1906112?v=4", "profile": "https://github.com/jdkloe", "contributions": [ "code", "test", "bug" ] }, { "login": "georgeouzou", "name": "George Ouzounoudis", "avatar_url": "https://avatars3.githubusercontent.com/u/16732042?v=4", "profile": "https://github.com/georgeouzou", "contributions": [ "code", "ideas" ] }, { "login": "djhoese", "name": "David Hoese", "avatar_url": "https://avatars3.githubusercontent.com/u/1828519?v=4", "profile": "https://github.com/djhoese", "contributions": [ "review", "ideas", "platform", "doc", "test", "code" ] }, { "login": "mitkin", "name": "Mikhail Itkin", "avatar_url": "https://avatars3.githubusercontent.com/u/3927849?v=4", "profile": "http://mitkin.github.io", "contributions": [ "code" ] }, { "login": "dopplershift", "name": "Ryan May", "avatar_url": "https://avatars2.githubusercontent.com/u/221526?v=4", "profile": "http://dopplershift.github.io", "contributions": [ "code" ] }, { "login": "artttt", "name": "artttt", "avatar_url": "https://avatars3.githubusercontent.com/u/4626281?v=4", "profile": "https://github.com/artttt", "contributions": [ "ideas" ] }, { "login": "ocefpaf", "name": "Filipe", "avatar_url": 
"https://avatars1.githubusercontent.com/u/950575?v=4", "profile": "http://ocefpaf.github.io/python4oceanographers", "contributions": [ "infra", "code", "platform", "doc" ] }, { "login": "heitorPB", "name": "Heitor", "avatar_url": "https://avatars2.githubusercontent.com/u/13461702?v=4", "profile": "https://github.com/heitorPB", "contributions": [ "doc" ] }, { "login": "sebastic", "name": "Bas Couwenberg", "avatar_url": "https://avatars3.githubusercontent.com/u/4605306?v=4", "profile": "https://github.com/sebastic", "contributions": [ "code", "platform", "test" ] }, { "login": "nickeubank", "name": "Nick Eubank", "avatar_url": "https://avatars0.githubusercontent.com/u/9683693?v=4", "profile": "https://github.com/nickeubank", "contributions": [ "code" ] }, { "login": "mdunphy", "name": "Michael Dunphy", "avatar_url": "https://avatars3.githubusercontent.com/u/9088426?v=4", "profile": "https://www.math.uwaterloo.ca/~mdunphy/", "contributions": [ "doc" ] }, { "login": "matthew-brett", "name": "Matthew Brett", "avatar_url": "https://avatars2.githubusercontent.com/u/67612?v=4", "profile": "http://matthew.dynevor.org", "contributions": [ "infra", "platform" ] }, { "login": "jdemaeyer", "name": "Jakob de Maeyer ", "avatar_url": "https://avatars1.githubusercontent.com/u/10531844?v=4", "profile": "https://naboa.de", "contributions": [ "code" ] }, { "login": "gitter-badger", "name": "The Gitter Badger", "avatar_url": "https://avatars2.githubusercontent.com/u/8518239?v=4", "profile": "https://gitter.im", "contributions": [ "doc" ] }, { "login": "bmwiedemann", "name": "Bernhard M. Wiedemann", "avatar_url": "https://avatars3.githubusercontent.com/u/637990?v=4", "profile": "http://lizards.opensuse.org/author/bmwiedemann/", "contributions": [ "code" ] }, { "login": "ReallyNiceGuy", "name": "Marco Aurélio da Costa", "avatar_url": "https://avatars0.githubusercontent.com/u/6545730?v=4", "profile": "https://github.com/ReallyNiceGuy", "contributions": [ "code" ] }, { "login": "ChrisBarker-NOAA", "name": "Christopher H. 
Barker", "avatar_url": "https://avatars2.githubusercontent.com/u/916576?v=4", "profile": "https://github.com/ChrisBarker-NOAA", "contributions": [ "code" ] }, { "login": "kbevers", "name": "Kristian Evers", "avatar_url": "https://avatars3.githubusercontent.com/u/13132571?v=4", "profile": "https://evers.dev/", "contributions": [ "question", "ideas", "doc" ] }, { "login": "rouault", "name": "Even Rouault", "avatar_url": "https://avatars2.githubusercontent.com/u/1192433?v=4", "profile": "http://www.spatialys.com/en/about/", "contributions": [ "question" ] }, { "login": "cgohlke", "name": "Christoph Gohlke", "avatar_url": "https://avatars3.githubusercontent.com/u/483428?v=4", "profile": "https://github.com/cgohlke", "contributions": [ "platform", "question", "bug", "test" ] }, { "login": "chrrrisw", "name": "Chris Willoughby", "avatar_url": "https://avatars0.githubusercontent.com/u/5555320?v=4", "profile": "https://github.com/chrrrisw", "contributions": [ "code" ] }, { "login": "glostis", "name": "Guillaume Lostis", "avatar_url": "https://avatars0.githubusercontent.com/u/25295717?v=4", "profile": "https://github.com/glostis", "contributions": [ "doc" ] }, { "login": "edpop", "name": "Eduard Popov", "avatar_url": "https://avatars3.githubusercontent.com/u/13479292?v=4", "profile": "https://github.com/edpop", "contributions": [ "doc" ] }, { "login": "jranalli", "name": "Joe Ranalli", "avatar_url": "https://avatars2.githubusercontent.com/u/7864460?v=4", "profile": "http://www.personal.psu.edu/jar339", "contributions": [ "bug", "code", "test" ] }, { "login": "gberardinelli", "name": "Greg Berardinelli", "avatar_url": "https://avatars0.githubusercontent.com/u/13799588?v=4", "profile": "https://github.com/gberardinelli", "contributions": [ "bug", "code", "ideas", "test" ] }, { "login": "mraspaud", "name": "Martin Raspaud", "avatar_url": "https://avatars1.githubusercontent.com/u/167802?v=4", "profile": "https://github.com/mraspaud", "contributions": [ "bug", "code", "test", "ideas" ] }, { "login": "mwtoews", "name": "Mike Taves", "avatar_url": "https://avatars1.githubusercontent.com/u/895458?v=4", "profile": "https://sites.google.com/site/mwtoews/", "contributions": [ "test" ] }, { "login": "habi", "name": "David Haberthür", "avatar_url": "https://avatars2.githubusercontent.com/u/1651235?v=4", "profile": "http://davidhaberthür.ch", "contributions": [ "doc" ] }, { "login": "mmodenesi", "name": "mmodenesi", "avatar_url": "https://avatars2.githubusercontent.com/u/5569789?v=4", "profile": "https://github.com/mmodenesi", "contributions": [ "bug", "code", "test" ] }, { "login": "jacob-indigo", "name": "jacob-indigo", "avatar_url": "https://avatars0.githubusercontent.com/u/48448372?v=4", "profile": "https://www.indigoag.com/", "contributions": [ "bug", "code" ] }, { "login": "rahulporuri", "name": "Poruri Sai Rahul", "avatar_url": "https://avatars0.githubusercontent.com/u/1926457?v=4", "profile": "https://rahulporuri.github.io", "contributions": [ "test" ] }, { "login": "underchemist", "name": "Yann-Sebastien Tremblay-Johnston", "avatar_url": "https://avatars1.githubusercontent.com/u/5283998?v=4", "profile": "https://medium.com/@underchemist", "contributions": [ "doc" ] }, { "login": "odidev", "name": "odidev", "avatar_url": "https://avatars2.githubusercontent.com/u/40816837?v=4", "profile": "https://github.com/odidev", "contributions": [ "platform" ] }, { "login": "idanmiara", "name": "Idan Miara", "avatar_url": "https://avatars.githubusercontent.com/u/26349741?v=4", "profile": 
"https://github.com/idanmiara", "contributions": [ "code", "doc", "example", "test" ] }, { "login": "direvus", "name": "Brendan Jurd", "avatar_url": "https://avatars.githubusercontent.com/u/312229?v=4", "profile": "https://github.com/direvus", "contributions": [ "doc", "design" ] }, { "login": "bjlittle", "name": "Bill Little", "avatar_url": "https://avatars.githubusercontent.com/u/2051656?v=4", "profile": "https://www.metoffice.gov.uk/", "contributions": [ "doc" ] }, { "login": "gerritholl", "name": "Gerrit Holl", "avatar_url": "https://avatars.githubusercontent.com/u/500246?v=4", "profile": "https://github.com/gerritholl", "contributions": [ "doc" ] }, { "login": "Kirill888", "name": "Kirill Kouzoubov", "avatar_url": "https://avatars.githubusercontent.com/u/1428024?v=4", "profile": "https://github.com/Kirill888", "contributions": [ "code" ] }, { "login": "hemberger", "name": "Dan Hemberger", "avatar_url": "https://avatars.githubusercontent.com/u/846186?v=4", "profile": "https://github.com/hemberger", "contributions": [ "bug", "code" ] }, { "login": "martinfleis", "name": "Martin Fleischmann", "avatar_url": "https://avatars.githubusercontent.com/u/36797143?v=4", "profile": "https://github.com/martinfleis", "contributions": [ "bug", "code", "test" ] }, { "login": "orontee", "name": "Matthias Meulien", "avatar_url": "https://avatars.githubusercontent.com/u/2065954?v=4", "profile": "https://github.com/orontee", "contributions": [ "code", "bug" ] }, { "login": "iboates", "name": "Isaac Boates", "avatar_url": "https://avatars.githubusercontent.com/u/13814358?v=4", "profile": "https://github.com/iboates", "contributions": [ "code", "bug", "test" ] }, { "login": "kdpenner", "name": "Kyle Penner", "avatar_url": "https://avatars.githubusercontent.com/u/9297904?v=4", "profile": "https://github.com/kdpenner", "contributions": [ "code", "bug", "doc" ] }, { "login": "paulcochrane", "name": "paulcochrane", "avatar_url": "https://avatars.githubusercontent.com/u/18310598?v=4", "profile": "https://github.com/paulcochrane", "contributions": [ "code", "doc", "test", "bug" ] }, { "login": "vot4anto", "name": "Antonio Ettorre", "avatar_url": "https://avatars.githubusercontent.com/u/56338190?v=4", "profile": "https://github.com/vot4anto", "contributions": [ "platform" ] }, { "login": "DWesl", "name": "DWesl", "avatar_url": "https://avatars.githubusercontent.com/u/22566757?v=4", "profile": "https://github.com/DWesl", "contributions": [ "code" ] }, { "login": "molinav", "name": "Víctor Molina García", "avatar_url": "https://avatars.githubusercontent.com/u/9979942?v=4", "profile": "https://github.com/molinav", "contributions": [ "platform" ] }, { "login": "skogler", "name": "Samuel Kogler", "avatar_url": "https://avatars.githubusercontent.com/u/1032405?v=4", "profile": "https://github.com/skogler", "contributions": [ "bug", "code" ] }, { "login": "shadchin", "name": "Alexander Shadchin", "avatar_url": "https://avatars.githubusercontent.com/u/61256?v=4", "profile": "https://github.com/shadchin", "contributions": [ "bug", "code" ] }, { "login": "greglucas", "name": "Greg Lucas", "avatar_url": "https://avatars.githubusercontent.com/u/12417828?v=4", "profile": "https://github.com/greglucas", "contributions": [ "code", "ideas", "maintenance" ] }, { "login": "dmahr1", "name": "Dan Mahr", "avatar_url": "https://avatars.githubusercontent.com/u/8354515?v=4", "profile": "https://github.com/dmahr1", "contributions": [ "code", "doc", "test" ] }, { "login": "rhugonnet", "name": "Romain Hugonnet", "avatar_url": 
"https://avatars.githubusercontent.com/u/28896516?v=4", "profile": "https://github.com/rhugonnet", "contributions": [ "code", "doc", "test" ] }, { "login": "jjimenezshaw", "name": "Javier Jimenez Shaw", "avatar_url": "https://avatars.githubusercontent.com/u/15678366?v=4", "profile": "https://javier.jimenezshaw.com/", "contributions": [ "code", "doc", "test" ] }, { "login": "djm93dev", "name": "Daniel McDonald", "avatar_url": "https://avatars.githubusercontent.com/u/101536185?v=4", "profile": "https://github.com/djm93dev", "contributions": [ "doc" ] }, { "login": "cyschneck", "name": "Cora Schneck", "avatar_url": "https://avatars.githubusercontent.com/u/22159116?v=4", "profile": "https://cyschneck.com/", "contributions": [ "doc", "test" ] }, { "login": "zanejgr", "name": "zanejgr", "avatar_url": "https://avatars.githubusercontent.com/u/14795919?v=4", "profile": "https://github.com/zanejgr", "contributions": [ "doc" ] }, { "login": "kloczek", "name": "Tomasz Kłoczko", "avatar_url": "https://avatars.githubusercontent.com/u/31284574?v=4", "profile": "https://github.com/kloczek", "contributions": [ "test" ] }, { "login": "tqa236", "name": "Trinh Quoc Anh", "avatar_url": "https://avatars.githubusercontent.com/u/25203655?v=4", "profile": "https://github.com/tqa236", "contributions": [ "test" ] }, { "login": "necabo", "name": "necabo", "avatar_url": "https://avatars.githubusercontent.com/u/23185845?v=4", "profile": "https://github.com/necabo", "contributions": [ "platform" ] } ], "contributorsPerLine": 7 } pyproj-3.7.1/.coveragerc000066400000000000000000000001651475425760300152050ustar00rootroot00000000000000[run] plugins = Cython.Coverage [report] # number of decimal points to report for coverage percentage precision = 2 pyproj-3.7.1/.flake8000066400000000000000000000005321475425760300142350ustar00rootroot00000000000000[flake8] max-line-length = 88 ignore = # Unnecessary dict/list/tuple call - rewrite as a literal C408 # whitespace before ':' - doesn't work well with black E203 # missing whitespace around operator - let black worry about that E225 # line break occurred before a binary operator - let black worry about that W503 pyproj-3.7.1/.github/000077500000000000000000000000001475425760300144225ustar00rootroot00000000000000pyproj-3.7.1/.github/ISSUE_TEMPLATE/000077500000000000000000000000001475425760300166055ustar00rootroot00000000000000pyproj-3.7.1/.github/ISSUE_TEMPLATE/bug_report.md000066400000000000000000000032521475425760300213010ustar00rootroot00000000000000--- name: Bug report about: Create a report to help us improve labels: bug --- #### Code Sample, a copy-pastable example if possible A "Minimal, Complete and Verifiable Example" will make it much easier for maintainers to help you: - http://matthewrocklin.com/blog/work/2018/02/28/minimal-bug-reports - https://stackoverflow.com/help/minimal-reproducible-example ```python # Your code here ``` #### Problem description [this should explain **why** the current behavior is a problem and why the expected output is a better solution.] 
#### Expected Output

#### Environment Information

- Output from: `pyproj -v`
- Output from: `python -m pyproj -v`
- Output from: `python -c "import pyproj; pyproj.show_versions()"`
- pyproj version (`python -c "import pyproj; print(pyproj.__version__)"`)
- PROJ version (`python -c "import pyproj; print(pyproj.proj_version_str)"`)
- PROJ data directory (`python -c "import pyproj; print(pyproj.datadir.get_data_dir())"`)
- Python version (`python -c "import sys; print(sys.version.replace('\n', ' '))"`)
- Operating System Information (`python -c "import platform; print(platform.platform())"`)

#### Installation method

- conda, pip wheel, from source, etc...

#### Conda environment information (if you installed with conda):
Environment (`conda list`):

```
$ conda list proj
```

Details about conda and system (`conda info`):

```
$ conda info
```
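
For convenience, the environment details requested above can also be gathered in one go. This is only an illustrative sketch using the same calls listed in this template, not a required part of the report:

```python
# Illustrative sketch: collects the environment information requested above
# using the calls already listed in this template.
import platform
import sys

import pyproj

print("pyproj:", pyproj.__version__)
print("PROJ:", pyproj.proj_version_str)
print("PROJ data dir:", pyproj.datadir.get_data_dir())
print("Python:", sys.version.replace("\n", " "))
print("OS:", platform.platform())

# Full report, including dependency versions
pyproj.show_versions()
```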
pyproj-3.7.1/.github/ISSUE_TEMPLATE/config.yml000066400000000000000000000005671475425760300206050ustar00rootroot00000000000000contact_links: - name: Ask questions url: https://github.com/pyproj4/pyproj/discussions about: Please ask and answer questions here. - name: Ask questions from the GIS community url: https://gis.stackexchange.com/questions/tagged/pyproj about: To get answers from questions in the GIS commminuty, please ask and answer questions here with the pyproj tag. pyproj-3.7.1/.github/ISSUE_TEMPLATE/feature_request.md000066400000000000000000000003101475425760300223240ustar00rootroot00000000000000--- name: Feature request about: Suggest an idea for this project labels: proposal --- pyproj-3.7.1/.github/ISSUE_TEMPLATE/installation_issues.md000066400000000000000000000013721475425760300232260ustar00rootroot00000000000000--- name: Installation issues about: Issues installing pyproj (http://pyproj4.github.io/pyproj/stable/installation.html) labels: installation-issues --- #### Installation method/steps - Installation method (conda, pip wheel, from source, etc...) - How did you install PROJ? Where is it installed? - Please provide all commands/steps you used to install pyproj and PROJ. #### Environment Information - pyproj version you are attempting to install - PROJ version (Execute `proj` command and give version here.) - Python version (`python -c "import sys; print(sys.version.replace('\n', ' '))"`) - Operation System Information (`python -c "import platform; print(platform.platform())"`) pyproj-3.7.1/.github/PULL_REQUEST_TEMPLATE.md000066400000000000000000000003221475425760300202200ustar00rootroot00000000000000 - [ ] Closes #xxxx - [ ] Tests added - [ ] Fully documented, including `history.rst` for all changes and `api/*.rst` for new API pyproj-3.7.1/.github/dependabot.yml000066400000000000000000000003321475425760300172500ustar00rootroot00000000000000version: 2 updates: # Maintain dependencies for GitHub Actions - package-ecosystem: "github-actions" directory: "/" schedule: # Check for updates to GitHub Actions every week interval: "weekly" pyproj-3.7.1/.github/workflows/000077500000000000000000000000001475425760300164575ustar00rootroot00000000000000pyproj-3.7.1/.github/workflows/build_docs.yaml000066400000000000000000000027251475425760300214600ustar00rootroot00000000000000name: Publish Docs on: push: branches: [ main ] release: types: [ released ] concurrency: group: ${{ github.workflow }}-${{ github.head_ref || github.ref }} cancel-in-progress: true jobs: docs: name: Publish Docs runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 with: persist-credentials: false - name: Setup Conda uses: mamba-org/setup-micromamba@v2 with: init-shell: bash environment-name: docs create-args: >- python=3.11 cython proj - name: Install and Build shell: bash run: | micromamba run -n docs python -m pip install -e . 
micromamba run -n docs python -m pip install -r requirements-docs.txt micromamba run -n docs sphinx-build -b html docs/ docs/_build/ - name: Deploy 🚀 uses: JamesIves/github-pages-deploy-action@v4 if: ${{ github.event_name == 'release' }} with: token: ${{ secrets.GITHUB_TOKEN }} branch: gh-pages folder: docs/_build/ clean: false target-folder: ${{ github.ref_name }} - name: Deploy 🚀 uses: JamesIves/github-pages-deploy-action@v4 if: ${{ github.event_name == 'push' }} with: token: ${{ secrets.GITHUB_TOKEN }} branch: gh-pages folder: docs/_build/ clean: false target-folder: latest pyproj-3.7.1/.github/workflows/release.yaml000066400000000000000000000200431475425760300207620ustar00rootroot00000000000000name: Wheels & sdist on: push: branches: [ main ] release: types: [ released, prereleased ] pull_request: # also build on PRs touching this file paths: - ".github/workflows/release.yaml" - "ci/proj-compile-wheels.sh" concurrency: group: ${{ github.workflow }}-${{ github.head_ref || github.ref }} cancel-in-progress: true env: PROJ_VERSION: "9.5.1" DEBIAN_FRONTEND: noninteractive jobs: make_sdist: name: Make sdist runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 - name: Setup Conda uses: mamba-org/setup-micromamba@v2 with: init-shell: bash environment-name: sdist_env create-args: >- python-build twine cython proj=${{ matrix.proj-version }} - name: Make sdist shell: bash run: | micromamba run -n sdist_env python -m build --sdist - name: Check packages shell: bash run: | micromamba run -n sdist_env twine check --strict dist/* - name: Upload artifacts uses: actions/upload-artifact@v4 with: name: sdist path: ./dist/*.tar.gz retention-days: 5 build_wheels: name: Build ${{ matrix.arch }} wheels on ${{ matrix.os }} runs-on: ${{ matrix.os }} continue-on-error: ${{ github.event_name == 'push' && github.ref_type != 'tag' }} strategy: fail-fast: false matrix: include: - os: ubuntu-22.04 arch: x86_64 - os: ubuntu-22.04-arm arch: aarch64 - os: macos-13 arch: x86_64 cmake_osx_architectures: x86_64 macos_deployment_target: "13.0" - os: macos-14 arch: arm64 cmake_osx_architectures: arm64 macos_deployment_target: "14.0" - os: "windows-2022" arch: "auto64" triplet: "x64-windows" vcpkg_cache: "c:\\vcpkg\\installed" vcpkg_logs: "c:\\vcpkg\\buildtrees\\**\\*.log" - os: "windows-2022" arch: "auto32" triplet: "x86-windows" vcpkg_cache: "c:\\vcpkg\\installed" vcpkg_logs: "c:\\vcpkg\\buildtrees\\**\\*.log" steps: - uses: actions/checkout@v4 - uses: actions/setup-python@v5 - name: Setup MSVC (32-bit) if: ${{ matrix.triplet == 'x86-windows' }} uses: bus1/cabuild/action/msdevshell@e22aba57d6e74891d059d66501b6b5aed8123c4d # v1 with: architecture: 'x86' - name: Cache vcpkg if: contains(matrix.os, 'windows') uses: actions/cache@v4 id: vcpkgcache with: path: | ${{ matrix.vcpkg_cache }} # bump the last digit to avoid using previous build cache key: ${{ matrix.os }}-${{ matrix.triplet }}-vcpkg-proj${{ env.PROJ_VERSION }}-cache0 - name: Install PROJ with vcpkg if: contains(matrix.os, 'windows') env: VCPKG_DEFAULT_TRIPLET: ${{ matrix.triplet }} shell: bash run: | # Workaround for vcpkg downloading issue: # https://github.com/microsoft/vcpkg/issues/41199#issuecomment-2378255699 export SystemDrive="$SYSTEMDRIVE" export SystemRoot="$SYSTEMROOT" export windir="$WINDIR" cd "$VCPKG_INSTALLATION_ROOT" git pull > nul ./bootstrap-vcpkg.bat -disableMetrics vcpkg install --feature-flags="versions,manifests" --x-manifest-root=${GITHUB_WORKSPACE}/ci --x-install-root=$VCPKG_INSTALLATION_ROOT/installed mkdir -p 
${GITHUB_WORKSPACE}/pyproj/proj_dir/share/proj cp "$VCPKG_INSTALLATION_ROOT/installed/${{ matrix.triplet }}/share/proj/"* ${GITHUB_WORKSPACE}/pyproj/proj_dir/share/proj/ - name: Build wheels uses: pypa/cibuildwheel@v2.22 env: CIBW_SKIP: "pp*-win* pp31*" CIBW_ARCHS: ${{ matrix.arch }} CIBW_ENVIRONMENT_LINUX: PROJ_WHEEL=true PROJ_NETWORK=ON PROJ_VERSION=${{ env.PROJ_VERSION }} PROJ_DIR=/project/pyproj/proj_dir CIBW_ENVIRONMENT_MACOS: PROJ_WHEEL=true PROJ_NETWORK=ON PROJ_VERSION=${{ env.PROJ_VERSION }} PROJ_DIR=${GITHUB_WORKSPACE}/pyproj/proj_dir CMAKE_OSX_ARCHITECTURES='${{ matrix.cmake_osx_architectures }}' MACOSX_DEPLOYMENT_TARGET=${{ matrix.macos_deployment_target }} LDFLAGS="${LDFLAGS} -Wl,-rpath,${GITHUB_WORKSPACE}/pyproj/proj_dir/lib" CIBW_ENVIRONMENT_WINDOWS: PROJ_WHEEL=true PROJ_NETWORK=ON PROJ_VERSION=${{ env.PROJ_VERSION }} PROJ_DIR=$VCPKG_INSTALLATION_ROOT/installed/${{ matrix.triplet }} CIBW_BEFORE_BUILD_WINDOWS: "python -m pip install delvewheel" # Add Windows System32 explicitly to the path https://github.com/adang1345/delvewheel/issues/54 CIBW_REPAIR_WHEEL_COMMAND_WINDOWS: "delvewheel repair --add-path C:/Windows/System32 --add-path C:/vcpkg/installed/${{ matrix.triplet }}/bin -w {dest_dir} {wheel}" CIBW_BEFORE_ALL_LINUX: bash ./ci/proj-compile-wheels.sh CIBW_BEFORE_ALL_MACOS: bash ./ci/proj-compile-wheels.sh CIBW_TEST_REQUIRES: cython pytest numpy --config-settings=setup-args="-Dallow-noblas=true" CIBW_BEFORE_TEST: python -m pip install shapely pandas xarray || echo "Optional requirements install failed" CIBW_TEST_COMMAND: > pyproj -v && python -c "import pyproj; pyproj.Proj(init='epsg:4269')" && cp -r {package}/test . && python -m pytest test -v -s - name: Upload artifacts uses: actions/upload-artifact@v4 with: name: wheels-${{ matrix.os }}-${{ matrix.arch }} path: ./wheelhouse/*.whl retention-days: 5 publish: name: Publish on PyPI needs: [make_sdist,build_wheels] if: ${{ github.repository_owner == 'pyproj4' && github.event_name != 'pull_request' }} runs-on: ubuntu-latest steps: - uses: actions/download-artifact@v4 with: name: sdist path: dist - uses: actions/download-artifact@v4 continue-on-error: ${{ github.event_name == 'push' && github.ref_type != 'tag' }} with: name: wheels-ubuntu-22.04-x86_64 path: dist - uses: actions/download-artifact@v4 continue-on-error: ${{ github.event_name == 'push' && github.ref_type != 'tag' }} with: name: wheels-ubuntu-22.04-arm-aarch64 path: dist - uses: actions/download-artifact@v4 continue-on-error: ${{ github.event_name == 'push' && github.ref_type != 'tag' }} with: name: wheels-macos-13-x86_64 path: dist - uses: actions/download-artifact@v4 continue-on-error: ${{ github.event_name == 'push' && github.ref_type != 'tag' }} with: name: wheels-macos-14-arm64 path: dist - uses: actions/download-artifact@v4 continue-on-error: ${{ github.event_name == 'push' && github.ref_type != 'tag' }} with: name: wheels-windows-2022-auto64 path: dist - uses: actions/download-artifact@v4 continue-on-error: ${{ github.event_name == 'push' && github.ref_type != 'tag' }} with: name: wheels-windows-2022-auto32 path: dist - name: Upload Wheels to PyPI # release on every tag if: ${{ github.event_name == 'release' && github.ref_type == 'tag' }} uses: pypa/gh-action-pypi-publish@release/v1 with: user: __token__ password: ${{ secrets.PYPI_API_TOKEN }} skip_existing: true # repository_url: https://test.pypi.org/legacy/ # To test - name: Upload Nightly Wheelsref if : ${{ github.ref_type == 'branch' && github.ref_name == 'main' }} uses: 
scientific-python/upload-nightly-action@82396a2ed4269ba06c6b2988bb4fd568ef3c3d6b with: artifacts_path: dist anaconda_nightly_upload_token: ${{ secrets.ANACONDA_ORG_UPLOAD_TOKEN }} pyproj-3.7.1/.github/workflows/test_proj_latest.yaml000066400000000000000000000033531475425760300227340ustar00rootroot00000000000000name: Test PROJ and Cython Latest on: push: branches: [ main ] schedule: - cron: '0 0 * * 0' pull_request: # also build on PRs touching this file paths: - ".github/workflows/test_proj_latest.yaml" - "ci/proj-compile.sh" concurrency: group: ${{ github.workflow }}-${{ github.head_ref || github.ref }} cancel-in-progress: true env: DEBIAN_FRONTEND: noninteractive jobs: test_proj_latest: name: PROJ Latest runs-on: ubuntu-latest env: PYPROJ_FULL_COVERAGE: YES PROJ_DIR: ${{ github.workspace }}/proj_install PROJ_DATA: ${{ github.workspace }}/proj_install/share/proj steps: - uses: actions/checkout@v4 - uses: actions/setup-python@v5 with: python-version: '3.11' - name: Install PROJ shell: bash run: | sudo apt-get update sudo apt-get install -qq sqlite3 libsqlite3-dev libtiff-dev libcurl4-openssl-dev cmake bash ci/proj-compile.sh git - name: Install and Log Environment shell: bash run: | python -V python -m pip install --upgrade --pre --only-binary :all: -i https://pypi.anaconda.org/scientific-python-nightly-wheels/simple cython python -m pip install -e . python -m pip install -r requirements-test.txt pyproj -v - name: Test shell: bash run: | python -m pytest --cov-report term-missing --cov=pyproj --cov-report xml - name: Test Network shell: bash env: PROJ_NETWORK: ON run: | python -m pytest - name: Test Grids shell: bash run: | $PROJ_DIR/bin/projsync --quiet --bbox -175,0,-50,85 python -m pytest pyproj-3.7.1/.github/workflows/tests.yaml000066400000000000000000000134371475425760300205150ustar00rootroot00000000000000name: Tests on: push: branches: [ main ] pull_request: branches: [ main ] schedule: - cron: '0 0 * * 0' concurrency: group: ${{ github.workflow }}-${{ github.head_ref || github.ref }} cancel-in-progress: true env: PYPROJ_FULL_COVERAGE: YES DEBIAN_FRONTEND: noninteractive jobs: linting: runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 - uses: actions/setup-python@v5 with: python-version: '3.10' - uses: pre-commit/action@v3.0.1 - name: Install mypy run: | python -m pip install mypy types-certifi - name: mypy run: | mypy pyproj docker_tests: needs: linting runs-on: ubuntu-latest name: Docker | python=${{ matrix.python-version }} | PROJ=${{ matrix.proj-version }} container: ghcr.io/osgeo/proj:${{ matrix.proj-version }} strategy: fail-fast: false matrix: python-version: ['3.10', '3.11', '3.12'] proj-version: ['9.4.0'] include: - python-version: '3.10' proj-version: '9.3.1' - python-version: '3.10' proj-version: '9.2.1' steps: - uses: actions/checkout@v4 - name: Update run: | apt-get update apt-get -y install software-properties-common add-apt-repository -y ppa:deadsnakes/ppa apt-get update - name: Set up Python ${{ matrix.python-version }} run: | apt-get install -y --no-install-recommends \ python${{ matrix.python-version }} \ python${{ matrix.python-version }}-dev \ python${{ matrix.python-version }}-venv \ python3-pip \ g++ - name: Install dependencies run: | python${{ matrix.python-version }} -m venv testenv . testenv/bin/activate python -m pip install --upgrade pip python -m pip install -r requirements-dev.txt python -m pip install -e . python -m pip install -r requirements-test.txt - name: Test shell: bash run: | . 
testenv/bin/activate python -m pytest --cov-report term-missing --cov=pyproj --cov-report xml - name: Test Network shell: bash env: PROJ_NETWORK: ON run: | . testenv/bin/activate python -m pytest - name: Test Grids shell: bash run: | . testenv/bin/activate projsync --quiet --bbox -175,0,-50,85 python -m pytest conda_tests: needs: linting name: Conda ${{ matrix.os }} | ${{ matrix.python-implementation }}=${{ matrix.python-version }} | PROJ=${{ matrix.proj-version }} runs-on: ${{ matrix.os }} strategy: fail-fast: true matrix: os: [ubuntu-latest, macos-latest, windows-latest] python-version: ['3.10', '3.11', '3.12'] python-implementation: [python] proj-version: ['*'] # DISABLED UNTIL CONDA-FORGE PYPY SUPPORTS PYTHON 3.10+ # include: # - os: ubuntu-latest # python-version: '*' # python-implementation: pypy # proj-version: '*' steps: - uses: actions/checkout@v4 - name: Setup Conda uses: mamba-org/setup-micromamba@v2 with: # https://github.com/mamba-org/setup-micromamba/issues/225 micromamba-version: 1.5.10-0 init-shell: bash environment-name: test create-args: >- ${{ matrix.python-implementation }}=${{ matrix.python-version }} cython proj=${{ matrix.proj-version }} numpy shapely xarray pandas - name: Install Env shell: bash run: | if [ "${{ matrix.python-implementation }}" = "pypy" ]; then sed -i.bak '/xarray/d' requirements-test.txt; sed -i.bak '/pandas/d' requirements-test.txt; fi; micromamba run -n test python -m pip install -e . micromamba run -n test python -m pip install -r requirements-test.txt - name: Check and Log Environment shell: bash run: | micromamba run -n test python -V micromamba run -n test pyproj -v micromamba info - name: Install pylint shell: bash if: matrix.python-implementation == 'python' run: | micromamba run -n test python -m pip install pylint - name: pylint shell: bash if: matrix.python-implementation == 'python' run: | micromamba run -n test python -m pylint pyproj - name: Test with Coverage shell: bash if: matrix.python-implementation == 'python' run: | micromamba run -n test python -m pytest --cov-report term-missing --cov=pyproj --cov-report xml - name: Test shell: bash if: matrix.python-implementation == 'pypy' env: PROJ_NETWORK: OFF run: | micromamba run -n test python -m pytest - name: Test Network shell: bash env: PROJ_NETWORK: ON run: | micromamba run -n test python -m pytest - name: Test Grids shell: bash env: PROJ_NETWORK: OFF run: | micromamba run -n test projsync --quiet --bbox -175,0,-50,85 micromamba run -n test python -m pytest - name: Test Build docs shell: bash if: contains(matrix.os, 'ubuntu') && matrix.python-implementation == 'python' run: | micromamba run -n test python -m pip install -r requirements-docs.txt micromamba run -n test sphinx-build -b html docs/ docs/_build/ micromamba run -n test sphinx-build -b man docs/ docs/_build/ - uses: codecov/codecov-action@v5 pyproj-3.7.1/.gitignore000066400000000000000000000024561475425760300150610ustar00rootroot00000000000000pyproj/proj_dir/ pyproj/*.c pyproj/*/*.c pyproj/*/*.html pyproj/*.html proj-*/ # Byte-compiled / optimized / DLL files __pycache__/ *.py[cod] *$py.class # C extensions *.so # Distribution / packaging wheelhouse/ .Python env/ build/ develop-eggs/ dist/ downloads/ eggs/ .eggs/ lib/ lib64/ parts/ sdist/ var/ wheels/ *.egg-info/ .installed.cfg *.egg # PyInstaller # Usually these files are written by a python script from a template # before PyInstaller builds the exe, so as to inject date/other infos into it. 
*.manifest *.spec # Installer logs pip-log.txt pip-delete-this-directory.txt # Unit test / coverage reports htmlcov/ .tox/ .coverage .coverage.* .cache nosetests.xml coverage.xml *.cover .hypothesis/ .pytest_cache/ # Translations *.mo *.pot # Django stuff: *.log local_settings.py # Flask stuff: instance/ .webassets-cache # Scrapy stuff: .scrapy # Sphinx documentation docs/_build/ # PyBuilder target/ # Jupyter Notebook .ipynb_checkpoints # pyenv .python-version # celery beat schedule file celerybeat-schedule # SageMath parsed files *.sage.py # dotenv .env # virtualenv .venv venv/ ENV/ # Spyder project settings .spyderproject .spyproject # Rope project settings .ropeproject # mkdocs documentation /site # mypy .mypy_cache/ # pycharm .idea/ # pytest .pytest_cache/ # vscode .vscode/ pyproj-3.7.1/.isort.cfg000066400000000000000000000000671475425760300147640ustar00rootroot00000000000000[settings] known_first_party=pyproj,test profile=black pyproj-3.7.1/.pre-commit-config.yaml000066400000000000000000000023511475425760300173440ustar00rootroot00000000000000default_language_version: python: python3.10 repos: - repo: https://github.com/pre-commit/pre-commit-hooks rev: v4.5.0 hooks: - id: check-yaml - id: end-of-file-fixer - id: trailing-whitespace - repo: https://github.com/psf/black-pre-commit-mirror rev: 24.2.0 hooks: - id: black - repo: https://github.com/pycqa/isort rev: 5.13.2 hooks: - id: isort args: [setup.py, pyproj/, test/, docs/] - repo: https://github.com/asottile/blacken-docs rev: 1.16.0 hooks: - id: blacken-docs args: [--skip-errors] - repo: https://github.com/pycqa/flake8 rev: 7.0.0 hooks: - id: flake8 additional_dependencies: [flake8-comprehensions>=3.1.0] - id: flake8 name: flake8-pyx files: \.(pyx|pxd)$ types: - file args: [--append-config=flake8/cython.cfg] - repo: https://github.com/codespell-project/codespell rev: v2.2.6 hooks: - id: codespell - repo: https://github.com/astral-sh/ruff-pre-commit rev: v0.7.1 hooks: - id: ruff pyproj-3.7.1/.pylintrc000066400000000000000000000014211475425760300147250ustar00rootroot00000000000000[MAIN] ignore-patterns=.*\.pyi$ [MASTER] extension-pkg-whitelist=pyproj._crs, pyproj._transformer, pyproj._sync, pyproj._network, pyproj._geod, pyproj._context, pyproj._compat, pyproj._version, pyproj.database, pyproj.list [MESSAGES CONTROL] disable=cyclic-import, duplicate-code, line-too-long, logging-fstring-interpolation, protected-access, too-few-public-methods, too-many-arguments, too-many-locals, too-many-positional-arguments, too-many-public-methods [FORMAT] max-module-lines=1500 pyproj-3.7.1/.stickler.yml000066400000000000000000000001371475425760300155040ustar00rootroot00000000000000linters: black: config: ./pyproject.toml flake8: python: 3 max-line-length: 88 pyproj-3.7.1/CODE_OF_CONDUCT.md000066400000000000000000000064001475425760300156610ustar00rootroot00000000000000# Contributor Covenant Code of Conduct ## Our Pledge In the interest of fostering an open and welcoming environment, we as contributors and maintainers pledge to making participation in our project and our community a harassment-free experience for everyone, regardless of age, body size, disability, ethnicity, sex characteristics, gender identity and expression, level of experience, education, socio-economic status, nationality, personal appearance, race, religion, or sexual identity and orientation. 
## Our Standards Examples of behavior that contributes to creating a positive environment include: * Using welcoming and inclusive language * Being respectful of differing viewpoints and experiences * Gracefully accepting constructive criticism * Focusing on what is best for the community * Showing empathy towards other community members Examples of unacceptable behavior by participants include: * The use of sexualized language or imagery and unwelcome sexual attention or advances * Trolling, insulting/derogatory comments, and personal or political attacks * Public or private harassment * Publishing others' private information, such as a physical or electronic address, without explicit permission * Other conduct which could reasonably be considered inappropriate in a professional setting ## Our Responsibilities Project maintainers are responsible for clarifying the standards of acceptable behavior and are expected to take appropriate and fair corrective action in response to any instances of unacceptable behavior. Project maintainers have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors that they deem inappropriate, threatening, offensive, or harmful. ## Scope This Code of Conduct applies both within project spaces and in public spaces when an individual is representing the project or its community. Examples of representing a project or community include using an official project e-mail address, posting via an official social media account, or acting as an appointed representative at an online or offline event. Representation of a project may be further defined and clarified by project maintainers. ## Enforcement Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by contacting the project team. All complaints will be reviewed and investigated and will result in a response that is deemed necessary and appropriate to the circumstances. The project team is obligated to maintain confidentiality with regard to the reporter of an incident. Further details of specific enforcement policies may be posted separately. Project maintainers who do not follow or enforce the Code of Conduct in good faith may face temporary or permanent repercussions as determined by other members of the project's leadership. ## Attribution This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, available at https://www.contributor-covenant.org/version/1/4/code-of-conduct.html [homepage]: https://www.contributor-covenant.org For answers to common questions about this code of conduct, see https://www.contributor-covenant.org/faq pyproj-3.7.1/CONTRIBUTING.md000066400000000000000000000332561475425760300153240ustar00rootroot00000000000000# Contributors Guide Based on the guide from: https://github.com/Unidata/MetPy Interested in helping build pyproj? Have code from your research that you believe others will find useful? Have a few minutes to tackle an issue? In this guide we will get you setup and integrated into contributing to pyproj! ## Introduction First off, thank you for considering contributing to pyproj. pyproj is community-driven project. It's people like you that make pyproj useful and successful. 
There are many ways to contribute, from writing tutorials or examples, improvements to the documentation, submitting bug reports and feature requests, or even writing code which can be incorporated into pyproj for everyone to use. Following these guidelines helps to communicate that you respect the time of the developers managing and developing this open source project. In return, they should reciprocate that respect in addressing your issue, assessing changes, and helping you finalize your pull requests. So, please take a few minutes to read through this guide and get setup for success with your pyproj contributions. We're glad you're here! ## What Can I Do? * Tackle any [issues](https://github.com/pyproj4/pyproj/issues) you wish! We have a special label for issues that beginners might want to try. Have a look at our [current beginner issues](https://github.com/pyproj4/pyproj/issues?q=is%3Aopen+is%3Aissue+label%3Agood-first-issue). Also have a look at if the issue is already assigned to someone - this helps us make sure that work is not duplicated if the issue is already being worked on by someone else. * Contribute code you already have. It does not need to be perfect! We will help you clean things up, test it, etc. * Make a tutorial or example of how to do something. * Improve documentation of a feature you found troublesome. * File a new issue if you run into problems! ## Ground Rules The goal is to maintain a diverse community that's pleasant for everyone. Please be considerate and respectful of others by following our [code of conduct](https://github.com/pyproj4/pyproj/blob/main/CODE_OF_CONDUCT.md). Other items: * Each pull request should consist of a logical collection of changes. You can include multiple bug fixes in a single pull request, but they should be related. For unrelated changes, please submit multiple pull requests. * Do not commit changes to files that are irrelevant to your feature or bugfix (eg: .gitignore). * Be willing to accept criticism and work on improving your code; we don't want to break other users' code, so care must be taken not to introduce bugs. * Be aware that the pull request review process is not immediate, and is generally proportional to the size of the pull request. ## Reporting a bug The easiest way to get involved is to report issues you encounter when using pyproj or by requesting something you think is missing. * Head over to the [issues](https://github.com/pyproj4/pyproj/issues) page. * Search to see if your issue already exists or has even been solved previously. * If you indeed have a new issue or request, click the "New Issue" button. * Fill in as much of the issue template as is relevant. Please be as specific as possible. Include the version of the code you were using, as well as what operating system you are running. If possible, include complete, minimal example code that reproduces the problem. ## Setting up your development environment We recommend using the [conda](https://conda.io/docs/) package manager for your Python environments. Our recommended setup for contributing is: * Install [miniconda](https://docs.conda.io/en/latest/miniconda.html) on your system. * Install git on your system if it is not already there (install XCode command line tools on a Mac or git bash on Windows) * Login to your GitHub account and make a fork of the [pyproj repository](https://github.com/pyproj4/pyproj/) by clicking the "Fork" button. 
* Clone your fork of the pyproj repository (in terminal on Mac/Linux or git shell/ GUI on Windows) in the location you'd like to keep it. We are partial to creating a ``git_repos`` directory in our home folder. ``git clone https://github.com/your-user-name/pyproj.git`` * Navigate to that folder in the terminal or in Anaconda Prompt if you're on Windows. ``cd pyproj`` * Connect your repository to the upstream (main project). ``git remote add upstream https://github.com/pyproj4/pyproj.git`` * Create the development environment by running ``conda create -n devel -c conda-forge cython proj numpy shapely xarray pandas``. * If the minimum PROJ version is not yet available, you can build PROJ from source using: ```bash export PROJ_DIR=$PWD/pyproj/proj_dir mkdir $PROJ_DIR bash ci/proj-compile.sh git ``` * Activate our new development environment ``conda activate devel`` on Mac/Linux or ``activate devel`` on Windows. * Install development requirements ``python -m pip install -r requirements-dev.txt`` * Make an editable install of pyproj by running ``python -m pip install -e .`` * Setup pre-commit hooks ``pre-commit install`` Now you're all set! You have an environment called ``devel`` that you can work in. You'll need to make sure to activate that environment next time you want to use it after closing the terminal or your system. If you want to get back to the root environment, just run ``source deactivate`` (just ``deactivate`` on Windows). ## Pull Requests The changes to the pyproj source (and documentation) should be made via GitHub pull requests against ``main``, even for those with administration rights. While it's tempting to make changes directly to ``main`` and push them up, it is better to make a pull request so that others can give feedback. If nothing else, this gives a chance for the automated tests to run on the PR. This can eliminate "brown paper bag" moments with buggy commits on the main branch. During the Pull Request process, before the final merge, it's a good idea to rebase the branch and squash together smaller commits. It's not necessary to flatten the entire branch, but it can be nice to eliminate small fixes and get the merge down to logically arranged commits. This can also be used to hide sins from history--this is the only chance, since once it hits ``main``, it's there forever! **Working on your first Pull Request?** You can learn how from this *free* video series [How to Contribute to an Open Source Project on GitHub](https://egghead.io/courses/how-to-contribute-to-an-open-source-project-on-github), Aaron Meurer's [tutorial on the git workflow](https://www.asmeurer.com/git-workflow/), or the guide [“How to Contribute to Open Source"](https://opensource.guide/how-to-contribute/). Commit the changes you made. Chris Beams has written a [guide](https://chris.beams.io/posts/git-commit/) on how to write good commit messages. Push to your fork and [submit a pull request]( https://github.com/pyproj4/pyproj/compare/). ## Documentation Now that you've made your awesome contribution, it's time to tell the world how to use it. Writing documentation strings is really important to make sure others use your functionality properly. Didn't write new functions? That's fine, but be sure that the documentation for the code you touched is still in great shape. It is not uncommon to find some strange wording or clarification that you can take care of while you are here. If you added a new function make sure that it gets marked as included if appropriate in the GEMPAK conversion table. 
You can write examples in the documentation if they are simple concepts to demonstrate. If your feature is more complex, consider adding to the examples or tutorials for pyproj.

You can build the documentation locally to see how your changes will look.

* Install docs requirements: ``make install-docs``
* Build the docs: ``make docs``
* Or, to build and open in a browser: ``make docs-browser``

## Tests

Unit tests are the lifeblood of the project, as they ensure that we can continue to add and change the code and stay confident that things have not broken. Running the tests requires ``pytest``, which is easily available through ``conda`` or ``pip``. It was also installed if you made our default ``devel`` environment.

### Running Tests

Running the tests can be done by running ``python -m pytest``. Make sure you install the test requirements before running the tests: ``python -m pip install -r requirements-test.txt``.

Running the whole test suite isn't that slow, but can be a burden if you're working on just one module or a specific test. It is easy to run tests on a single directory:

    python -m pytest pyproj/calc

A specific test can be run as:

    python -m pytest -k test_my_test_func_name

### Writing Tests

Tests should ideally hit all of the lines of code added or changed. We have automated services that can help track down lines of code that are missed by tests. Watching the coverage has even helped find sections of dead code that could be removed!

Let's say we are adding a simple function to add two numbers and return the result as a float or as a string. (This would be a silly function, but go with us here for demonstration purposes.)

    def add_as_float_or_string(a, b, as_string=False):
        res = a + b
        if as_string:
            return str(res)
        return res

I can see two easy tests here: one for the results as a float and one for the results as a string. If I had added this to the ``calc`` module, I'd add those two tests in ``tests/test_calc.py``.

    def test_add_as_float_or_string_defaults():
        res = add_as_float_or_string(3, 4)
        assert res == 7

    def test_add_as_float_or_string_string_return():
        res = add_as_float_or_string(3, 4, as_string=True)
        assert res == '7'

There are plenty of more advanced testing concepts, like dealing with floating point comparisons, parameterizing tests, testing that exceptions are raised, and more. Have a look at the existing tests to get an idea of some of the common patterns.

## Code Style

pyproj uses the Python code style outlined in [PEP8](https://pep8.org) and [black](https://github.com/python/black). We enforce this style as code is added to keep everything clean and uniform. To this end, part of the automated testing for pyproj checks style. To check style locally within the source directory you can use the ``flake8`` and ``black`` tools. Running it from the root of the source directory is as easy as running ``pre-commit run --all`` in the base of the repository. You can also just submit your PR and the kind robots will comment on all style violations as well.

It can be a pain to make sure you have the right number of spaces around things, imports in order, and all of the other nits that the bots will find. It is very important though as this consistent style helps us keep pyproj readable, maintainable, and uniform.

## What happens after the pull request

You've made your changes, documented them, added some tests, and submitted a pull request. What now?

### Automated Testing

First, our army of never sleeping robots will begin a series of automated checks.
The test suite, documentation, style, and more will be checked on various versions of Python with current and legacy packages. Travis CI will run testing on Linux and Mac, Appveyor will run tests on Windows. Other services will kick in and check if there is a drop in code coverage or any style variations that should be corrected. If you see a red mark by a service, something failed and clicking the "Details" link will give you more information. We're happy to help if you are stuck. The robots can be difficult to satisfy, but they are there to help everyone write better code. In some cases, there will be exceptions to their suggestions, but these are rare. If you make changes to your code and push again, the tests will automatically run again. ### Code Review At this point you're waiting on us. You should expect to hear at least a comment within a couple of days. We may suggest some changes or improvements or alternatives. Some things that will increase the chance that your pull request is accepted quickly: * Write tests. * Follow [PEP8](https://pep8.org) for style. (The `flake8` utility can help with this.) * Use [black](https://github.com/python/black) * Write a [good commit message](https://tbaggery.com/2008/04/19/a-note-about-git-commit-messages.html) and consider using commit [conventions](https://www.conventionalcommits.org/). Pull requests will automatically have tests run by Travis. This includes running both the unit tests as well as the `flake8` code linter. ### Merging Once we're all happy with the pull request, it's time for it to get merged in. Only the maintainers can merge pull requests and you should never merge a pull request you have commits on as it circumvents the code review. If this is your first or second pull request, we'll likely help by rebasing and cleaning up the commit history for you. As your development skills increase, we'll help you learn how to do this. ## Allcontributors Recognition We do our best to recognize contributions, but we may miss some. If we did, please let us know. Also, if you would prefer not to be recognized on the README, please let us know. ## More Questions? If you're stuck somewhere or are interested in being a part of the community in other ways, feel free to contact us: * [GitHub Discussions](https://github.com/pyproj4/pyproj/discussions) * ["pyproj" tag on GIS Stack Exchange](https://gis.stackexchange.com/questions/tagged/pyproj) ## Further Reading There are a ton of great resources out there on contributing to open source and on the importance of writing tested and maintainable software. * [GitHub's Contributing to Open Source Guide](https://guides.github.com/activities/contributing-to-open-source/) * [Zen of Scientific Software Maintenance](https://jrleeman.github.io/ScientificSoftwareMaintenance/) pyproj-3.7.1/HOW_TO_RELEASE.md000066400000000000000000000044001475425760300156010ustar00rootroot00000000000000# Preparing a pyproj release Preparing a pyproj release is a two-phase process. ## Phase 1: Release Candidate In this phase, we want to ensure all the builds work on all platforms and methods of distribution for the next release. ### Add the rc postfix The first step in this phase is to update the version number `__version__` in `__init__.py` to the next release `..`. Then, add the `rc` style posfix following the [PEP-440](https://www.python.org/dev/peps/pep-0440/#pre-releases) conventions. ### Create a tag on the repository The next step is to create a tag with the same name as the version just added. 
This can be done using the git command line or from https://github.com/pyproj4/pyproj/tags. ### Test the release builds 1. Check the wheels built at https://github.com/pyproj4/pyproj using GitHub Actions. 2. Create a draft PR at https://github.com/conda-forge/pyproj-feedstock and verify tests pass. 3. Verify Debian builds were successful. 4. Verify Fedora builds were successful. 5. Verify the docs build successfully. ## Phase 2: Make the release After the candidate has proven itself, it will be promoted to a final release. ### Remove the rc postfix Remove the `rc` postfix from the the version number `__version__` in `__init__.py`. ### Create a tag on the repository The next step is to create a tag with the name `..`. This can be done using the git command line or from https://github.com/pyproj4/pyproj/tags. Next, go through the history and add release notes (see: [automatically generated release notes](https://docs.github.com/en/repositories/releasing-projects-on-github/automatically-generated-release-notes)). Make sure to acknowledge contributions made by others in the release. ### The wheels The wheels are tested with each merge to main and uploaded to https://pypi.anaconda.org/scientific-python-nightly-wheels/simple in GitHub Actions. They are uploaded to pypi on pre-release and release in GitHub Actions. ### Verify conda-forge build is correct A PR for `pyproj` will be generated automatically after you push to pypi. Verify all is correct on the PR at https://github.com/conda-forge/pyproj-feedstock. ### Update the docs On the `gh-pages` branch, update the stable symlink to point to the next version. pyproj-3.7.1/LICENSE000066400000000000000000000021361475425760300140710ustar00rootroot00000000000000Copyright (c) 2006-2018, Jeffrey Whitaker. Copyright (c) 2019-2024, Open source contributors. Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. pyproj-3.7.1/LICENSE_proj000066400000000000000000000032701475425760300151230ustar00rootroot00000000000000 All source, data files and other contents of the PROJ.4 package are available under the following terms. Note that the PROJ 4.3 and earlier was "public domain" as is common with US government work, but apparently this is not a well defined legal term in many countries. I am placing everything under the following MIT style license because I believe it is effectively the same as public domain, allowing anyone to use the code as they wish, including making proprietary derivatives. Though I have put my own name as copyright holder, I don't mean to imply I did the work. 
Essentially all work was done by Gerald Evenden. -------------- Copyright (c) 2000, Frank Warmerdam Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. pyproj-3.7.1/MANIFEST.in000066400000000000000000000006041475425760300146200ustar00rootroot00000000000000include README.md include MANIFEST.in include pyproject.toml include LICENSE include Makefile include pytest.ini include pyproj/py.typed include pyproj/*.pyd include pyproj/*.pyx include pyproj/*.pxd include pyproj/*.pxi include pyproj/*.pyi include test/sample.out include test/*.py include test/*/*.py exclude pyproj/*.c recursive-include docs * prune docs/_build prune pyproj/proj_dir pyproj-3.7.1/Makefile000066400000000000000000000045351475425760300145310ustar00rootroot00000000000000# This was based on a Makefile by Kirk Hansen .PHONY: clean clean-test clean-pyc clean-build clean-setup clean-cython docs help test .DEFAULT_GOAL := help define PRINT_HELP_PYSCRIPT import re, sys for line in sys.stdin: match = re.match(r'^([a-zA-Z_-]+):.*?## (.*)$$', line) if match: target, help = match.groups() print("%-20s %s" % (target, help)) endef export PRINT_HELP_PYSCRIPT help: @python -c "$$PRINT_HELP_PYSCRIPT" < $(MAKEFILE_LIST) clean: clean-build clean-pyc clean-test clean-setup clean-cython ## remove all build, test, coverage and Python artifacts clean-build: ## remove build artifacts rm -fr build/ rm -fr dist/ rm -fr .eggs/ find . -name '*.egg-info' -exec rm -fr {} + find . -name '*.egg' -exec rm -f {} + clean-pyc: ## remove Python file artifacts find . -name '*.pyc' -exec rm -f {} + find . -name '*.pyo' -exec rm -f {} + find . -name '*~' -exec rm -f {} + find . 
-name '__pycache__' -exec rm -fr {} + clean-test: ## remove test and coverage artifacts rm -f .coverage rm -fr htmlcov/ rm -fr .pytest_cache clean-setup: ## run python setup.py clean python setup.py clean clean-cython: ## clean the cython files rm -f pyproj/*.so rm -f pyproj/*/*.so rm -f pyproj/*/*.c rm -f pyproj/*.c check-type: mypy pyproj check: check-type pre-commit run --show-diff-on-failure --all-files test: ## run tests py.test test-verbose: ## run tests with full verbosity py.test -vv -s test-coverage: ## run tests and generate coverage report py.test --cov-report term-missing --cov=pyproj -v -s install-docs: ## Install requirements for building documentation python -m pip install -r requirements-docs.txt docs: ## generate Sphinx HTML documentation, including API docs $(MAKE) -C docs clean $(MAKE) -C docs html docs-browser: docs ## generate Sphinx HTML documentation, including API docs and open in a browser python -m webbrowser -t docs/_build/html/index.html docs-man: ## generate Sphinx man pages for CLI $(MAKE) -C docs clean $(MAKE) -C docs man install: clean ## install the package to the active Python's site-packages python -m pip install . install-dev: clean ## install development version to active Python's site-packages python -m pip install -r requirements-dev.txt pre-commit install python -m pip install -r requirements-test.txt PYPROJ_FULL_COVERAGE=YES python -m pip install -e . pyproj-3.7.1/README.md000066400000000000000000000727271475425760300143600ustar00rootroot00000000000000![Pyproj logo](https://raw.githubusercontent.com/pyproj4/pyproj/main/docs/media/logo.png) # pyproj Python interface to [PROJ](http://proj.org) (cartographic projections and coordinate transformations library).

All Contributors GitHub Actions Build Status Codecov Status PyPI Downloads Anaconda-Server Badge Code style: black pre-commit DOI
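A minimal usage sketch (illustrative only; the point coordinates are arbitrary and output values may vary slightly by PROJ version):

```python
from pyproj import Transformer

# Build a transformer from WGS 84 (EPSG:4326) to Web Mercator (EPSG:3857).
# always_xy=True keeps the coordinate order as (longitude, latitude) / (x, y).
transformer = Transformer.from_crs("EPSG:4326", "EPSG:3857", always_xy=True)
x, y = transformer.transform(2.2945, 48.8584)  # an arbitrary lon/lat point
print(x, y)
```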

## Documentation - Stable: http://pyproj4.github.io/pyproj/stable/ - Latest: https://pyproj4.github.io/pyproj/latest/ ## Bugs/Questions - Report bugs/feature requests: https://github.com/pyproj4/pyproj/issues - Ask questions: https://github.com/pyproj4/pyproj/discussions - Ask the GIS community: https://gis.stackexchange.com/questions/tagged/pyproj ## Contributors ✨ Thanks goes to these wonderful people ([emoji key](https://allcontributors.org/docs/en/emoji-key)):

Jeff Whitaker

📖 ⚠️ 💻 💡 🤔 👀 💬 🚧 🚇 🐛

Alan D. Snow

📖 ⚠️ 💻 💡 🚧 🚇 🤔 👀 💬 🐛

Micah Cochran

📖 ⚠️ 💻 🚧 🚇 👀 💬 🐛

Joris Van den Bossche

📖 💻 🤔 👀 💬 🐛 ⚠️

Chris Mayo

⚠️

Charles Karney

💻 ⚠️

Justin Dearing

🚇

Jos de Kloe

💻 ⚠️ 🐛

George Ouzounoudis

💻 🤔

David Hoese

👀 🤔 📦 📖 ⚠️ 💻

Mikhail Itkin

💻

Ryan May

💻

artttt

🤔

Filipe

🚇 💻 📦 📖

Heitor

📖

Bas Couwenberg

💻 📦 ⚠️

Nick Eubank

💻

Michael Dunphy

📖

Matthew Brett

🚇 📦

Jakob de Maeyer

💻

The Gitter Badger

📖

Bernhard M. Wiedemann

💻

Marco Aurélio da Costa

💻

Christopher H. Barker

💻

Kristian Evers

💬 🤔 📖

Even Rouault

💬

Christoph Gohlke

📦 💬 🐛 ⚠️

Chris Willoughby

💻

Guillaume Lostis

📖

Eduard Popov

📖

Joe Ranalli

🐛 💻 ⚠️

Greg Berardinelli

🐛 💻 🤔 ⚠️

Martin Raspaud

🐛 💻 ⚠️ 🤔

Mike Taves

⚠️

David Haberthür

📖

mmodenesi

🐛 💻 ⚠️

jacob-indigo

🐛 💻

Poruri Sai Rahul

⚠️

Yann-Sebastien Tremblay-Johnston

📖

odidev

📦

Idan Miara

💻 📖 💡 ⚠️

Brendan Jurd

📖 🎨

Bill Little

📖

Gerrit Holl

📖

Kirill Kouzoubov

💻

Dan Hemberger

🐛 💻

Martin Fleischmann

🐛 💻 ⚠️

Matthias Meulien

💻 🐛

Isaac Boates

💻 🐛 ⚠️

Kyle Penner

💻 🐛 📖

paulcochrane

💻 📖 ⚠️ 🐛

Antonio Ettorre

📦

DWesl

💻

Víctor Molina García

📦

Samuel Kogler

🐛 💻

Alexander Shadchin

🐛 💻

Greg Lucas

💻 🤔 🚧

Dan Mahr

💻 📖 ⚠️

Romain Hugonnet

💻 📖 ⚠️

Javier Jimenez Shaw

💻 📖 ⚠️

Daniel McDonald

📖

Cora Schneck

📖 ⚠️

zanejgr

📖

Tomasz Kłoczko

⚠️

Trinh Quoc Anh

⚠️

necabo

📦
This project follows the [all-contributors](https://github.com/all-contributors/all-contributors) specification. Contributions of any kind welcome! pyproj-3.7.1/ci/000077500000000000000000000000001475425760300134555ustar00rootroot00000000000000pyproj-3.7.1/ci/proj-compile-wheels.sh000066400000000000000000000240201475425760300176740ustar00rootroot00000000000000# INSTALL PROJ & DEPENDENCIES FOR WHEELS # Test for macOS with [ -n "$IS_MACOS" ] SQLITE_VERSION=3460100 LIBTIFF_VERSION=4.6.0 CURL_VERSION=8.6.0 NGHTTP2_VERSION=1.60.0 # ------------------------------------------ # From: https://github.com/multi-build/multibuild/ # ------------------------------------------ BUILD_PREFIX="${BUILD_PREFIX:-/usr/local}" OPENSSL_ROOT=${OPENSSL_ROOT:-openssl-3.2.1} # Hash from https://www.openssl.org/source/openssl-3.2.1.tar.gz.sha256 OPENSSL_HASH=${OPENSSL_HASH:-6ae015467dabf0469b139ada93319327be24b98251ffaeceda0221848dc09262} OPENSSL_DOWNLOAD_URL=${OPENSSL_DOWNLOAD_URL:-https://www.openssl.org/source} if [ $(uname) == "Darwin" ]; then IS_MACOS=1; fi if [ -f /etc/alpine-release ]; then IS_ALPINE=1 fi if [ -z "$IS_MACOS" ]; then # Strip all binaries after compilation. STRIP_FLAGS=${STRIP_FLAGS:-"-Wl,-strip-all"} export CFLAGS="${CFLAGS:-$STRIP_FLAGS}" export CXXFLAGS="${CXXFLAGS:-$STRIP_FLAGS}" export FFLAGS="${FFLAGS:-$STRIP_FLAGS}" fi export CPPFLAGS_BACKUP="$CPPFLAGS" export LIBRARY_PATH_BACKUP="$LIBRARY_PATH" export PKG_CONFIG_PATH_BACKUP="$PKG_CONFIG_PATH" function update_env_for_build_prefix { # Promote BUILD_PREFIX on search path to any newly built libs export CPPFLAGS="-I$BUILD_PREFIX/include $CPPFLAGS_BACKUP" export LIBRARY_PATH="$BUILD_PREFIX/lib:$LIBRARY_PATH_BACKUP" export PKG_CONFIG_PATH="$BUILD_PREFIX/lib/pkgconfig/:$PKG_CONFIG_PATH_BACKUP" # Add binary path for configure utils etc export PATH="$BUILD_PREFIX/bin:$PATH" } function rm_mkdir { # Remove directory if present, then make directory local path=$1 if [ -z "$path" ]; then echo "Need not-empty path"; exit 1; fi if [ -d "$path" ]; then rm -rf $path; fi mkdir $path } function untar { local in_fname=$1 if [ -z "$in_fname" ];then echo "in_fname not defined"; exit 1; fi local extension=${in_fname##*.} case $extension in tar) tar -xf $in_fname ;; gz|tgz) tar -zxf $in_fname ;; bz2) tar -jxf $in_fname ;; zip) unzip -qq $in_fname ;; xz) if [ -n "$IS_MACOS" ]; then tar -xf $in_fname else if [[ ! $(type -P "unxz") ]]; then echo xz must be installed to uncompress file; exit 1 fi unxz -c $in_fname | tar -xf - fi ;; *) echo Did not recognize extension $extension; exit 1 ;; esac } function suppress { # Run a command, show output only if return code not 0. # Takes into account state of -e option. # Compare # https://unix.stackexchange.com/questions/256120/how-can-i-suppress-output-only-if-the-command-succeeds#256122 # Set -e stuff agonized over in # https://unix.stackexchange.com/questions/296526/set-e-in-a-subshell local tmp=$(mktemp tmp.XXXXXXXXX) || return local errexit_set echo "Running $@" if [[ $- = *e* ]]; then errexit_set=true; fi set +e ( if [[ -n $errexit_set ]]; then set -e; fi; "$@" > "$tmp" 2>&1 ) ; ret=$? [ "$ret" -eq 0 ] || cat "$tmp" rm -f "$tmp" if [[ -n $errexit_set ]]; then set -e; fi return "$ret" } function yum_install { # CentOS 5 yum doesn't fail in some cases, e.g. if package is not found # https://serverfault.com/questions/694942/yum-should-error-when-a-package-is-not-available yum install -y "$1" && rpm -q "$1" } function install_rsync { # install rsync via package manager if [ -n "$IS_MACOS" ]; then # macOS. 
The colon in the next line is the null command : elif [ -n "$IS_ALPINE" ]; then [[ $(type -P rsync) ]] || apk add rsync elif [[ $MB_ML_VER == "_2_24" ]]; then # debian:9 based distro [[ $(type -P rsync) ]] || apt-get install -y rsync else # centos based distro [[ $(type -P rsync) ]] || yum_install rsync fi } function fetch_unpack { # Fetch input archive name from input URL # Parameters # url - URL from which to fetch archive # archive_fname (optional) archive name # # Echos unpacked directory and file names. # # If `archive_fname` not specified then use basename from `url` # If `archive_fname` already present at download location, use that instead. local url=$1 if [ -z "$url" ];then echo "url not defined"; exit 1; fi local archive_fname=${2:-$(basename $url)} local arch_sdir="${ARCHIVE_SDIR:-archives}" if [ -z "$IS_MACOS" ]; then local extension=${archive_fname##*.} if [ "$extension" == "xz" ]; then ensure_xz fi fi # Make the archive directory in case it doesn't exist mkdir -p $arch_sdir local out_archive="${arch_sdir}/${archive_fname}" # If the archive is not already in the archives directory, get it. if [ ! -f "$out_archive" ]; then # Source it from multibuild archives if available. local our_archive="${MULTIBUILD_DIR}/archives/${archive_fname}" if [ -f "$our_archive" ]; then ln -s $our_archive $out_archive else # Otherwise download it. curl -L $url > $out_archive fi fi # Unpack archive, refreshing contents, echoing dir and file # names. rm_mkdir arch_tmp install_rsync (cd arch_tmp && \ untar ../$out_archive && \ ls -1d * && rsync --delete -ah * ..) } function build_simple { # Example: build_simple libpng $LIBPNG_VERSION \ # https://download.sourceforge.net/libpng tar.gz \ # --additional --configure --arguments local name=$1 local version=$2 local url=$3 local ext=${4:-tar.gz} local configure_args=${@:5} if [ -e "${name}-stamp" ]; then return fi local name_version="${name}-${version}" local archive=${name_version}.${ext} fetch_unpack $url/$archive (cd $name_version \ && ./configure --prefix=$BUILD_PREFIX $configure_args \ && make -j4 \ && make install) touch "${name}-stamp" } function get_modern_cmake { # Install cmake >= 2.8 if [ -n "$IS_ALPINE" ]; then return; fi # alpine has modern cmake already local cmake=cmake if [ -n "$IS_MACOS" ]; then brew install cmake > /dev/null elif [[ $MB_ML_VER == "_2_24" ]]; then # debian:9 based distro apt-get install -y cmake else if [ "`yum search cmake | grep ^cmake28\.`" ]; then cmake=cmake28 fi # centos based distro yum_install $cmake > /dev/null fi echo $cmake } function build_zlib { # Gives an old but safe version if [ -n "$IS_MACOS" ]; then return; fi # OSX has zlib already if [ -n "$IS_ALPINE" ]; then return; fi # alpine has zlib already if [ -e zlib-stamp ]; then return; fi if [[ $MB_ML_VER == "_2_24" ]]; then # debian:9 based distro apt-get install -y zlib1g-dev else #centos based distro yum_install zlib-devel fi touch zlib-stamp } function build_perl { if [ -n "$IS_MACOS" ]; then return; fi # OSX has perl already if [ -n "$IS_ALPINE" ]; then return; fi # alpine has perl already if [ -e perl-stamp ]; then return; fi if [[ $MB_ML_VER == "_2_24" ]]; then # debian:9 based distro apt-get install -y perl else # centos based distro yum_install perl-core fi touch perl-stamp } function build_openssl { if [ -e openssl-stamp ]; then return; fi suppress build_perl fetch_unpack ${OPENSSL_DOWNLOAD_URL}/${OPENSSL_ROOT}.tar.gz check_sha256sum $ARCHIVE_SDIR/${OPENSSL_ROOT}.tar.gz ${OPENSSL_HASH} (cd ${OPENSSL_ROOT} \ && ./config no-ssl2 no-shared -fPIC 
--prefix=$BUILD_PREFIX \ && make -j4 \ && make install) touch openssl-stamp } # ------------------------------------------ function build_nghttp2 { if [ -e nghttp2-stamp ]; then return; fi fetch_unpack https://github.com/nghttp2/nghttp2/releases/download/v${NGHTTP2_VERSION}/nghttp2-${NGHTTP2_VERSION}.tar.gz (cd nghttp2-${NGHTTP2_VERSION} \ && ./configure --enable-lib-only --prefix=$BUILD_PREFIX \ && make -j4 \ && make install) touch nghttp2-stamp } function build_curl_ssl { if [ -e curl-stamp ]; then return; fi CFLAGS="$CFLAGS -g -O2" CXXFLAGS="$CXXFLAGS -g -O2" suppress build_nghttp2 local flags="--prefix=$BUILD_PREFIX --with-nghttp2=$BUILD_PREFIX --with-zlib=$BUILD_PREFIX" if [ -n "$IS_MACOS" ]; then flags="$flags --with-darwinssl" else # manylinux suppress build_openssl flags="$flags --with-ssl --without-libpsl" LD_LIBRARY_PATH=$LD_LIBRARY_PATH:$BUILD_PREFIX/lib fi fetch_unpack https://curl.se/download/curl-${CURL_VERSION}.tar.gz (cd curl-${CURL_VERSION} \ && if [ -z "$IS_MACOS" ]; then \ LIBS=-ldl ./configure $flags; else \ ./configure $flags; fi\ && make -j4 \ && make install) touch curl-stamp } function build_libtiff { if [ -e libtiff-stamp ]; then return; fi build_simple tiff $LIBTIFF_VERSION https://download.osgeo.org/libtiff touch libtiff-stamp } function build_sqlite { if [ -z "$IS_MACOS" ]; then CFLAGS="$CFLAGS -DHAVE_PREAD64 -DHAVE_PWRITE64" fi if [ -e sqlite-stamp ]; then return; fi build_simple sqlite-autoconf $SQLITE_VERSION https://www.sqlite.org/2024 touch sqlite-stamp } function build_proj { if [ -e proj-stamp ]; then return; fi suppress get_modern_cmake fetch_unpack https://download.osgeo.org/proj/proj-${PROJ_VERSION}.tar.gz suppress build_curl_ssl (cd proj-${PROJ_VERSION:0:5} \ && cmake . \ -DCMAKE_INSTALL_PREFIX=$PROJ_DIR \ -DBUILD_SHARED_LIBS=ON \ -DCMAKE_BUILD_TYPE=Release \ -DENABLE_IPO=ON \ -DBUILD_APPS:BOOL=OFF \ -DBUILD_TESTING:BOOL=OFF \ -DCMAKE_PREFIX_PATH=$BUILD_PREFIX \ -DCMAKE_INSTALL_LIBDIR=lib \ && cmake --build . -j$(nproc) \ && cmake --install .) touch proj-stamp } # Run installation process suppress update_env_for_build_prefix suppress build_zlib suppress build_sqlite suppress build_libtiff build_proj pyproj-3.7.1/ci/proj-compile.sh000077500000000000000000000015121475425760300164130ustar00rootroot00000000000000#!/bin/bash pushd . echo "Building PROJ ($1) from source..." BUILD_PROJ_DIR=proj-${1:0:5} # Download PROJ if [[ $1 == "git" ]]; then git clone https://github.com/OSGeo/PROJ.git ${BUILD_PROJ_DIR} else curl https://download.osgeo.org/proj/proj-$1.tar.gz > ${BUILD_PROJ_DIR}.tar.gz tar zxf ${BUILD_PROJ_DIR}.tar.gz rm ${BUILD_PROJ_DIR}.tar.gz fi cd ${BUILD_PROJ_DIR} mkdir build cd build # build using cmake cmake .. \ -DCMAKE_INSTALL_PREFIX=$PROJ_DIR \ -DBUILD_SHARED_LIBS=ON \ -DCMAKE_BUILD_TYPE=Release \ -DENABLE_IPO=ON \ -DBUILD_CCT:BOOL=OFF \ -DBUILD_CS2CS:BOOL=OFF \ -DBUILD_GEOD:BOOL=OFF \ -DBUILD_GIE:BOOL=OFF \ -DBUILD_GMOCK:BOOL=OFF \ -DBUILD_PROJINFO:BOOL=OFF \ -DBUILD_TESTING:BOOL=OFF cmake --build . -j$(nproc) cmake --install . # cleanup cd ../.. 
rm -rf ${BUILD_PROJ_DIR} popd pyproj-3.7.1/ci/vcpkg.json000066400000000000000000000003431475425760300154620ustar00rootroot00000000000000{ "name": "pyproj", "version": "3.7.1", "dependencies": [ { "name": "proj", "version>=": "9.5.1" } ], "builtin-baseline": "4ec74919dbf24931b29347b000c74374e8bbde35" } pyproj-3.7.1/codecov.yml000066400000000000000000000002431475425760300152260ustar00rootroot00000000000000coverage: status: project: default: target: 94% # the required coverage value threshold: 0.2% # the leniency in hitting the target pyproj-3.7.1/docs/000077500000000000000000000000001475425760300140125ustar00rootroot00000000000000pyproj-3.7.1/docs/Makefile000066400000000000000000000011401475425760300154460ustar00rootroot00000000000000# Minimal makefile for Sphinx documentation # # You can set these variables from the command line. SPHINXOPTS = SPHINXBUILD = python3 -msphinx SPHINXPROJ = pyproj SOURCEDIR = . BUILDDIR = _build # Put it first so that "make" without argument is like "make help". help: @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) .PHONY: help Makefile # Catch-all target: route all unknown targets to Sphinx using the new # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). %: Makefile @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) pyproj-3.7.1/docs/advanced_examples.rst000066400000000000000000000242541475425760300202160ustar00rootroot00000000000000.. _advanced_examples: Advanced Examples ================= Optimize Transformations ------------------------ Here are a few tricks to try out if you want to optimize your transformations. Repeated transformations ~~~~~~~~~~~~~~~~~~~~~~~~ .. versionadded:: 2.1.0 If you use the same transform, using the :class:`pyproj.transformer.Transformer` can help optimize your transformations. .. code-block:: python import numpy from pyproj import Transformer, transform transformer = Transformer.from_crs(2263, 4326) x_coords = numpy.random.randint(80000, 120000) y_coords = numpy.random.randint(200000, 250000) Example with :func:`pyproj.transformer.transform`: .. code-block:: python transform(2263, 4326, x_coords, y_coords) Results: 160 ms ± 3.68 ms per loop (mean ± std. dev. of 7 runs, 1 loop each) Example with :class:`pyproj.transformer.Transformer`: .. code-block:: python transformer.transform(x_coords, y_coords) Results: 6.32 µs ± 49.7 ns per loop (mean ± std. dev. of 7 runs, 100000 loops each) Transforming with the same projections ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ pyproj skips `noop` transformations. Transformation Group -------------------- .. versionadded:: 2.3.0 The :class:`pyproj.transformer.TransformerGroup` provides both available transformations as well as missing transformations. 1. Helpful if you want to use an alternate transformation and have a good reason for it. .. code-block:: python >>> from pyproj.transformer import TransformerGroup >>> trans_group = TransformerGroup("EPSG:4326","EPSG:2964") >>> trans_group - transformers: 8 - unavailable_operations: 1 >>> trans_group.best_available True >>> trans_group.transformers[0].transform(66, -153) (149661.2825058747, 5849322.174897663) >>> trans_group.transformers[1].transform(66, -153) (149672.928811047, 5849311.372139239) >>> trans_group.transformers[2].transform(66, -153) (149748.32734832275, 5849274.621409136) 2. Helpful if want to check that the best possible transformation exists. And if not, how to get the missing grid. .. 
code-block:: python >>> from pyproj.transformer import TransformerGroup >>> tg = TransformerGroup("EPSG:4326", "+proj=aea +lat_0=50 +lon_0=-154 +lat_1=55 +lat_2=65 +x_0=0 +y_0=0 +datum=NAD27 +no_defs +type=crs +units=m", always_xy=True) UserWarning: Best transformation is not available due to missing Grid(short_name=ntv2_0.gsb, full_name=, package_name=proj-datumgrid-north-america, url=https://download.osgeo.org/proj/proj-datumgrid-north-america-latest.zip, direct_download=True, open_license=True, available=False) f"{operation.grids[0]!r}" >>> tg - transformers: 37 - unavailable_operations: 41 >>> tg.transformers[0].description 'axis order change (2D) + Inverse of NAD27 to WGS 84 (3) + axis order change (2D) + unknown' >>> tg.unavailable_operations[0].name 'Inverse of NAD27 to WGS 84 (33) + axis order change (2D) + unknown' >>> tg.unavailable_operations[0].grids[0].url 'https://download.osgeo.org/proj/proj-datumgrid-north-america-latest.zip' Area of Interest ---------------- .. versionadded:: 2.3.0 Depending on the location of your transformation, using the area of interest may impact which transformation operation is selected in the transformation. .. code-block:: python >>> from pyproj.transformer import Transformer, AreaOfInterest >>> transformer = Transformer.from_crs("EPSG:4326", "EPSG:2694") >>> transformer Description: Inverse of Pulkovo 1995 to WGS 84 (2) + 3-degree Gauss-Kruger zone 60 Area of Use: - name: Russia - bounds: (18.92, 39.87, -168.97, 85.2) >>> transformer = Transformer.from_crs( ... "EPSG:4326", ... "EPSG:2694", ... area_of_interest=AreaOfInterest(-136.46, 49.0, -60.72, 83.17), ... ) >>> transformer Description: Inverse of NAD27 to WGS 84 (13) + Alaska Albers Area of Use: - name: Canada - NWT; Nunavut; Saskatchewan - bounds: (-136.46, 49.0, -60.72, 83.17) Promote CRS to 3D ------------------- .. versionadded:: 3.1 In PROJ 6+ you need to explicitly change your CRS to 3D if you have 2D CRS and you want the ellipsoidal height taken into account. .. code-block:: python >>> from pyproj import CRS, Transformer >>> transformer = Transformer.from_crs("EPSG:4326", "EPSG:2056", always_xy=True) >>> transformer.transform(8.37909, 47.01987, 1000) (2671499.8913080636, 1208075.1135782297, 1000.0) >>> transformer_3d = Transformer.from_crs( ... CRS("EPSG:4326").to_3d(), ... CRS("EPSG:2056").to_3d(), ... always_xy=True, ...) >>> transformer_3d.transform(8.37909, 47.01987, 1000) (2671499.8913080636, 1208075.1135782297, 951.4265527743846) Demote CRS to 2D ---------------- .. versionadded:: 3.6 With the need for explicit 3D CRS since PROJ 6+, one might need to retrieve their 2D version, for example to create another 3D CRS compound between a 2D CRS and a vertical CRS. .. code-block:: python >>> from pyproj import CRS, Transformer >>> from pyproj.crs import CompoundCRS >>> src_crs = CRS("EPSG:4979") # Any 3D CRS, here the 3D WGS 84 >>> vert_crs = CRS("EPSG:5773") # Any vertical CRS, here the EGM96 geoid >>> dst_crs = CompoundCRS(src_crs.name + vert_crs.name, components=[src_crs.to_2d(), vert_crs]) >>> transformer_3d = Transformer.from_crs(src_crs, dst_crs, always_xy=True) >>> transformer_3d.transform(8.37909, 47.01987, 1000) (8.37909, 47.01987, 951.7851086745321) Projected CRS Bounds ---------------------- .. versionadded:: 3.1 The boundary of the CRS is given in geographic coordinates. This is the recommended method for calculating the projected bounds. .. 
code-block:: python >>> from pyproj import CRS, Transformer >>> crs = CRS("EPSG:3857") >>> transformer = Transformer.from_crs(crs.geodetic_crs, crs, always_xy=True) >>> transformer.transform_bounds(*crs.area_of_use.bounds) (-20037508.342789244, -20048966.104014594, 20037508.342789244, 20048966.104014594) Multithreading -------------- As of version 3.1, these objects are thread-safe: - :class:`pyproj.crs.CRS` - :class:`pyproj.transformer.Transformer` If you have pyproj<3.1, you will need to create the object within the thread that uses it. Here is a simple demonstration: .. code-block:: python import concurrent.futures from pyproj import Transformer def transform_point(point): transformer = Transformer.from_crs(4326, 3857) return transformer.transform(point, point * 2) with concurrent.futures.ThreadPoolExecutor(max_workers=10) as executor: for result in executor.map(transform_point, range(5)): print(result) Optimizing Single-Threaded Applications ---------------------------------------- If you have a single-threaded application that generates many objects, enabling the use of the global context can provide performance enhancements. For information about using the global context, see: :ref:`global_context` Here is an example where enabling the global context can help: .. code-block:: python import pyproj codes = pyproj.get_codes("EPSG", pyproj.enums.PJType.PROJECTED_CRS, False) crs_list = [pyproj.CRS.from_epsg(code) for code in codes] Caching pyproj objects ----------------------- If you are likely to re-create pyproj objects such as :class:`pyproj.transformer.Transformer` or :class:`pyproj.crs.CRS`, using a cache can help reduce the cost of re-creating the objects. Transformer ~~~~~~~~~~~~ .. code-block:: python from functools import lru_cache from pyproj import Transformer TransformerFromCRS = lru_cache(Transformer.from_crs) Transformer.from_crs(2263, 4326) # no cache TransformerFromCRS(2263, 4326) # cache Try it: .. code-block:: python from timeit import timeit timeit( "CachedTransformer(2263, 4326)", setup=( "from pyproj import Transformer; " "from functools import lru_cache; " "CachedTransformer = lru_cache(Transformer.from_crs)" ), number=1000000, ) timeit( "Transformer.from_crs(2263, 4326)", setup=("from pyproj import Transformer"), number=100, ) Without the cache, it takes around 2 seconds to do 100 iterations. With the cache, it takes 0.1 seconds to do 1 million iterations. CRS Example ~~~~~~~~~~~~ .. code-block:: python from functools import lru_cache from pyproj import CRS CachedCRS = lru_cache(CRS) crs = CRS(4326) # no cache crs = CachedCRS(4326) # cache Try it: .. code-block:: python from timeit import timeit timeit( "CachedCRS(4326)", setup=( "from pyproj import CRS; " "from functools import lru_cache; " "CachedCRS = lru_cache(CRS)" ), number=1000000, ) timeit( "CRS(4326)", setup=("from pyproj import CRS"), number=1000, ) Without the cache, it takes around 1 seconds to do 1000 iterations. With the cache, it takes 0.1 seconds to do 1 million iterations. .. _debugging-internal-proj: Debugging Internal PROJ ------------------------ .. versionadded:: 3.0.0 To get more debugging information from the internal PROJ code: 1. Set the :envvar:`PROJ_DEBUG` environment variable to the desired level. 2. Activate logging in `pyproj` with the devel `DEBUG`: More information available here: https://docs.python.org/3/howto/logging.html Here are examples to get started. Add handler to the `pyproj` logger: .. 
code-block:: python import logging console_handler = logging.StreamHandler() formatter = logging.Formatter("%(levelname)s:%(message)s") console_handler.setFormatter(formatter) logger = logging.getLogger("pyproj") logger.addHandler(console_handler) logger.setLevel(logging.DEBUG) Activate default logging config: .. code-block:: python import logging logging.basicConfig(format="%(levelname)s:%(message)s", level=logging.DEBUG) pyproj-3.7.1/docs/api/000077500000000000000000000000001475425760300145635ustar00rootroot00000000000000pyproj-3.7.1/docs/api/aoi.rst000066400000000000000000000006261475425760300160710ustar00rootroot00000000000000Area of Interest ================== pyproj.aoi.AreaOfInterest -------------------------- .. note:: The backwards compatible import is `pyproj.transformer.AreaOfInterest` .. autoclass:: pyproj.aoi.AreaOfInterest :members: pyproj.aoi.AreaOfUse --------------------- .. autoclass:: pyproj.aoi.AreaOfUse :members: pyproj.aoi.BBox ----------------- .. autoclass:: pyproj.aoi.BBox :members: pyproj-3.7.1/docs/api/crs/000077500000000000000000000000001475425760300153525ustar00rootroot00000000000000pyproj-3.7.1/docs/api/crs/coordinate_operation.rst000066400000000000000000000120261475425760300223140ustar00rootroot00000000000000.. _coordinate_operation: Coordinate Operations ===================== CoordinateOperation -------------------- .. autoclass:: pyproj.crs.CoordinateOperation :members: :inherited-members: Param ------ .. autoclass:: pyproj._crs.Param :members: Grid ---- .. autoclass:: pyproj._crs.Grid :members: AlbersEqualAreaConversion --------------------------- .. autoclass:: pyproj.crs.coordinate_operation.AlbersEqualAreaConversion :members: :show-inheritance: :special-members: __new__ AzimuthalEquidistantConversion ------------------------------- .. autoclass:: pyproj.crs.coordinate_operation.AzimuthalEquidistantConversion :members: :show-inheritance: :special-members: __new__ EquidistantCylindricalConversion -------------------------------- .. autoclass:: pyproj.crs.coordinate_operation.EquidistantCylindricalConversion :members: :show-inheritance: :special-members: __new__ GeostationarySatelliteConversion -------------------------------- .. autoclass:: pyproj.crs.coordinate_operation.GeostationarySatelliteConversion :members: :show-inheritance: :special-members: __new__ LambertAzimuthalEqualAreaConversion ----------------------------------- .. autoclass:: pyproj.crs.coordinate_operation.LambertAzimuthalEqualAreaConversion :members: :show-inheritance: :special-members: __new__ LambertConformalConic1SPConversion ----------------------------------- .. autoclass:: pyproj.crs.coordinate_operation.LambertConformalConic1SPConversion :members: :show-inheritance: :special-members: __new__ LambertConformalConic2SPConversion ----------------------------------- .. autoclass:: pyproj.crs.coordinate_operation.LambertConformalConic2SPConversion :members: :show-inheritance: :special-members: __new__ LambertCylindricalEqualAreaConversion ------------------------------------- .. autoclass:: pyproj.crs.coordinate_operation.LambertCylindricalEqualAreaConversion :members: :show-inheritance: :special-members: __new__ .. autoclass:: pyproj.crs.coordinate_operation.LambertCylindricalEqualAreaScaleConversion :members: :show-inheritance: :special-members: __new__ MercatorAConversion -------------------- .. autoclass:: pyproj.crs.coordinate_operation.MercatorAConversion :members: :show-inheritance: :special-members: __new__ MercatorBConversion ------------------- .. 
autoclass:: pyproj.crs.coordinate_operation.MercatorBConversion :members: :show-inheritance: :special-members: __new__ HotineObliqueMercatorBConversion --------------------------------- .. autoclass:: pyproj.crs.coordinate_operation.HotineObliqueMercatorBConversion :members: :show-inheritance: :special-members: __new__ OrthographicConversion ----------------------- .. autoclass:: pyproj.crs.coordinate_operation.OrthographicConversion :members: :show-inheritance: :special-members: __new__ PlateCarreeConversion -------------------------------- .. autoclass:: pyproj.crs.coordinate_operation.PlateCarreeConversion :members: :show-inheritance: :special-members: __new__ PolarStereographicAConversion ----------------------------- .. autoclass:: pyproj.crs.coordinate_operation.PolarStereographicAConversion :members: :show-inheritance: :special-members: __new__ PolarStereographicBConversion ----------------------------- .. autoclass:: pyproj.crs.coordinate_operation.PolarStereographicBConversion :members: :show-inheritance: :special-members: __new__ SinusoidalConversion --------------------- .. autoclass:: pyproj.crs.coordinate_operation.SinusoidalConversion :members: :show-inheritance: :special-members: __new__ StereographicConversion ------------------------ .. autoclass:: pyproj.crs.coordinate_operation.StereographicConversion :members: :show-inheritance: :special-members: __new__ UTMConversion ------------- .. autoclass:: pyproj.crs.coordinate_operation.UTMConversion :members: :show-inheritance: :special-members: __new__ TransverseMercatorConversion ---------------------------- .. autoclass:: pyproj.crs.coordinate_operation.TransverseMercatorConversion :members: :show-inheritance: :special-members: __new__ VerticalPerspectiveConversion ----------------------------- .. autoclass:: pyproj.crs.coordinate_operation.VerticalPerspectiveConversion :members: :show-inheritance: :special-members: __new__ RotatedLatitudeLongitudeConversion ---------------------------------- .. autoclass:: pyproj.crs.coordinate_operation.RotatedLatitudeLongitudeConversion :members: :show-inheritance: :special-members: __new__ PoleRotationNetCDFCFConversion ---------------------------------- .. autoclass:: pyproj.crs.coordinate_operation.PoleRotationNetCDFCFConversion :members: :show-inheritance: :special-members: __new__ ToWGS84Transformation --------------------- .. autoclass:: pyproj.crs.coordinate_operation.ToWGS84Transformation :members: :show-inheritance: :special-members: __new__ pyproj-3.7.1/docs/api/crs/coordinate_system.rst000066400000000000000000000017011475425760300216360ustar00rootroot00000000000000.. _coordinate_system: Coordinate Systems ================== CoordinateSystem ------------------------ .. autoclass:: pyproj.crs.CoordinateSystem :members: :inherited-members: Axis ------------------------ .. autoclass:: pyproj._crs.Axis :members: :inherited-members: Ellipsoidal2DCS ------------------------ .. autoclass:: pyproj.crs.coordinate_system.Ellipsoidal2DCS :members: :show-inheritance: :special-members: __new__ Ellipsoidal3DCS ------------------------ .. autoclass:: pyproj.crs.coordinate_system.Ellipsoidal3DCS :members: :show-inheritance: :special-members: __new__ Cartesian2DCS ------------------------ .. autoclass:: pyproj.crs.coordinate_system.Cartesian2DCS :members: :show-inheritance: :special-members: __new__ VerticalCS ------------------------ .. 
autoclass:: pyproj.crs.coordinate_system.VerticalCS :members: :show-inheritance: :special-members: __new__ pyproj-3.7.1/docs/api/crs/crs.rst000066400000000000000000000026661475425760300167050ustar00rootroot00000000000000.. _crs: CRS === CRS ---- .. autoclass:: pyproj.crs.CRS :members: :inherited-members: :special-members: __init__ GeographicCRS ------------------------ .. autoclass:: pyproj.crs.GeographicCRS :members: :show-inheritance: :special-members: __init__ DerivedGeographicCRS ------------------------ .. autoclass:: pyproj.crs.DerivedGeographicCRS :members: :show-inheritance: :special-members: __init__ GeocentricCRS ------------------------ .. autoclass:: pyproj.crs.GeocentricCRS :members: :show-inheritance: :special-members: __init__ ProjectedCRS ----------------------- .. autoclass:: pyproj.crs.ProjectedCRS :members: :show-inheritance: :special-members: __init__ VerticalCRS ----------------------- .. autoclass:: pyproj.crs.VerticalCRS :members: :show-inheritance: :special-members: __init__ BoundCRS ----------------------- .. autoclass:: pyproj.crs.BoundCRS :members: :show-inheritance: :special-members: __init__ CompoundCRS ----------------------- .. autoclass:: pyproj.crs.CompoundCRS :members: :show-inheritance: :special-members: __init__ CustomConstructorCRS ------------------------ .. autoclass:: pyproj.crs.CustomConstructorCRS :members: :show-inheritance: :special-members: __init__ is_wkt ----------------- .. autofunction:: pyproj.crs.is_wkt is_proj ------------------ .. autofunction:: pyproj.crs.is_proj pyproj-3.7.1/docs/api/crs/datum.rst000066400000000000000000000017751475425760300172300ustar00rootroot00000000000000.. _datum: Datum ===== .. note:: PROJ >= 7.0.0 will have better support for aliases for datum names. Until then, you will need to use the full name of the datum. There is support currently for the old PROJ names for datums such as WGS84 and NAD83. Datum --------- .. autoclass:: pyproj.crs.Datum :members: :inherited-members: CustomDatum ------------ .. autoclass:: pyproj.crs.datum.CustomDatum :members: :show-inheritance: :special-members: __new__ Ellipsoid ---------- .. autoclass:: pyproj.crs.Ellipsoid :members: :inherited-members: CustomEllipsoid ---------------- .. autoclass:: pyproj.crs.datum.CustomEllipsoid :members: :show-inheritance: :special-members: __new__ PrimeMeridian -------------- .. autoclass:: pyproj.crs.PrimeMeridian :members: :inherited-members: CustomPrimeMeridian -------------------- .. autoclass:: pyproj.crs.datum.CustomPrimeMeridian :members: :show-inheritance: :special-members: __new__ pyproj-3.7.1/docs/api/crs/enums.rst000066400000000000000000000012051475425760300172310ustar00rootroot00000000000000Enumerations ============ DatumType ---------- .. autoclass:: pyproj.crs.enums.DatumType :members: CoordinateOperationType ------------------------ .. autoclass:: pyproj.crs.enums.CoordinateOperationType :members: Cartesian2DCSAxis ------------------ .. autoclass:: pyproj.crs.enums.Cartesian2DCSAxis :members: Ellipsoidal2DCSAxis -------------------- .. autoclass:: pyproj.crs.enums.Ellipsoidal2DCSAxis :members: Ellipsoidal3DCSAxis -------------------- .. autoclass:: pyproj.crs.enums.Ellipsoidal3DCSAxis :members: VerticalCSAxis --------------- .. autoclass:: pyproj.crs.enums.VerticalCSAxis :members: pyproj-3.7.1/docs/api/crs/index.rst000066400000000000000000000002661475425760300172170ustar00rootroot00000000000000CRS module API Documentation ============================ .. 
toctree:: :maxdepth: 2 :caption: Contents: crs coordinate_system coordinate_operation datum enums pyproj-3.7.1/docs/api/database.rst000066400000000000000000000020661475425760300170650ustar00rootroot00000000000000.. _database: Database ========= Methods that query the PROJ database for information. pyproj.database.get_units_map ----------------------------- .. note:: The backwards compatible import is `pyproj.get_units_map` .. autofunction:: pyproj.database.get_units_map .. autoclass:: pyproj.database.Unit pyproj.database.get_authorities -------------------------------- .. note:: The backwards compatible import is `pyproj.get_authorities` .. autofunction:: pyproj.database.get_authorities pyproj.database.get_codes -------------------------- .. note:: The backwards compatible import is `pyproj.get_codes` .. autofunction:: pyproj.database.get_codes pyproj.database.query_crs_info ------------------------------- .. autofunction:: pyproj.database.query_crs_info .. autoclass:: pyproj.database.CRSInfo pyproj.database.query_utm_crs_info ----------------------------------- .. autofunction:: pyproj.database.query_utm_crs_info pyproj.database.get_database_metadata --------------------------------------- .. autofunction:: pyproj.database.get_database_metadata pyproj-3.7.1/docs/api/datadir.rst000066400000000000000000000007601475425760300167300ustar00rootroot00000000000000.. _data_directory: Data Directory =============== pyproj.datadir.get_data_dir --------------------------- .. autofunction:: pyproj.datadir.get_data_dir pyproj.datadir.set_data_dir --------------------------- .. autofunction:: pyproj.datadir.set_data_dir pyproj.datadir.append_data_dir ------------------------------ .. autofunction:: pyproj.datadir.append_data_dir pyproj.datadir.get_user_data_dir --------------------------------- .. autofunction:: pyproj.datadir.get_user_data_dir pyproj-3.7.1/docs/api/enums.rst000066400000000000000000000004721475425760300164470ustar00rootroot00000000000000Enumerations ============ .. autoclass:: pyproj.enums.WktVersion :members: .. autoclass:: pyproj.enums.ProjVersion :members: .. autoclass:: pyproj.enums.TransformDirection :members: .. autoclass:: pyproj.enums.PJType :members: .. autoclass:: pyproj.enums.GeodIntermediateFlag :members: pyproj-3.7.1/docs/api/exceptions.rst000066400000000000000000000001071475425760300174740ustar00rootroot00000000000000Exceptions ========== .. automodule:: pyproj.exceptions :members: pyproj-3.7.1/docs/api/geod.rst000066400000000000000000000003341475425760300162330ustar00rootroot00000000000000Geod ==== pyproj.Geod ----------- .. autoclass:: pyproj.Geod :members: :show-inheritance: :inherited-members: :special-members: __init__ .. autoclass:: pyproj.geod.GeodIntermediateReturn :members: pyproj-3.7.1/docs/api/global_context.rst000066400000000000000000000013661475425760300203270ustar00rootroot00000000000000.. _global_context: Global Context ============== .. deprecated:: 3.7.0 No longer necessary as there is only one context per thread now. If you have a single-threaded application that generates many objects, enabling the use of the global context can provide performance enhancements. .. warning:: The global context is not thread safe. .. warning:: The global context maintains a connection to the database through the duration of each python session and is closed once the program terminates. How to enable: - Using :func:`pyproj.set_use_global_context`. - Using the environment variable `PYPROJ_GLOBAL_CONTEXT`. pyproj.set_use_global_context ----------------------------- .. 
autofunction:: pyproj.set_use_global_context pyproj-3.7.1/docs/api/index.rst000066400000000000000000000003711475425760300164250ustar00rootroot00000000000000API Documentation ================= .. toctree:: :maxdepth: 2 :caption: Contents: crs/index transformer geod proj database list datadir network sync global_context enums aoi exceptions show_versions pyproj-3.7.1/docs/api/list.rst000066400000000000000000000011261475425760300162700ustar00rootroot00000000000000Lists ===== pyproj.list.get_proj_operations_map ----------------------------------- .. note:: The backwards compatible import is `pyproj.get_proj_operations_map` .. autofunction:: pyproj.list.get_proj_operations_map pyproj.list.get_ellps_map -------------------------- .. note:: The backwards compatible import is `pyproj.get_ellps_map` .. autofunction:: pyproj.list.get_ellps_map pyproj.list.get_prime_meridians_map ------------------------------------ .. note:: The backwards compatible import is `pyproj.get_prime_meridians_map` .. autofunction:: pyproj.list.get_prime_meridians_map pyproj-3.7.1/docs/api/network.rst000066400000000000000000000006661475425760300170160ustar00rootroot00000000000000.. _network_api: PROJ Network Settings ====================== pyproj.network.set_network_enabled ----------------------------------- .. autofunction:: pyproj.network.set_network_enabled pyproj.network.is_network_enabled ---------------------------------- .. autofunction:: pyproj.network.is_network_enabled pyproj.network.set_ca_bundle_path ---------------------------------- .. autofunction:: pyproj.network.set_ca_bundle_path pyproj-3.7.1/docs/api/proj.rst000066400000000000000000000007401475425760300162700ustar00rootroot00000000000000Proj ==== `pyproj.Proj` is functionally equivalent to the `proj` command line tool in PROJ. The PROJ docs say:: The `proj` program is limited to converting between geographic and projection coordinates within one datum. pyproj.Proj ----------- .. autoclass:: pyproj.Proj :members: :inherited-members: :special-members: __init__, __call__ :show-inheritance: pyproj.proj.Factors ------------------- .. autoclass:: pyproj.proj.Factors :members: pyproj-3.7.1/docs/api/show_versions.rst000066400000000000000000000001571475425760300202300ustar00rootroot00000000000000Show Versions ============= pyproj.show_versions -------------------- .. autofunction:: pyproj.show_versions pyproj-3.7.1/docs/api/sync.rst000066400000000000000000000004451475425760300162740ustar00rootroot00000000000000Sync Transformation Grids ========================= pyproj.sync.get_transform_grid_list ------------------------------------ .. autofunction:: pyproj.sync.get_transform_grid_list pyproj.sync.get_proj_endpoint ------------------------------ .. autofunction:: pyproj.sync.get_proj_endpoint pyproj-3.7.1/docs/api/transformer.rst000066400000000000000000000024711475425760300176630ustar00rootroot00000000000000.. _transformer: Transformer =========== The `pyproj.Transformer` has the capabilities of performing 2D, 3D, and 4D (time) transformations. It can do anything that the PROJ command line programs :ref:`proj`, :ref:`cs2cs`, and :ref:`cct` can do. This means that it allows translation between any pair of definable coordinate systems, including support for datum transformation. .. warning:: The axis order may be swapped if the source and destination CRS's are defined as having the first coordinate component point in a northerly direction (See PROJ FAQ on `axis order `_). You can check the axis order with the :class:`pyproj.crs.CRS` class. 
If you prefer to keep your axis order as always x,y, you can use the `always_xy` option when creating the :class:`pyproj.transformer.Transformer`. pyproj.Transformer ------------------ .. autoclass:: pyproj.transformer.Transformer :members: pyproj.transformer.TransformerGroup ----------------------------------- .. autoclass:: pyproj.transformer.TransformerGroup :members: :special-members: __init__ pyproj.transform ---------------- .. autofunction:: pyproj.transformer.transform pyproj.itransform ----------------- .. autofunction:: pyproj.transformer.itransform pyproj-3.7.1/docs/build_crs.rst000066400000000000000000000123261475425760300165160ustar00rootroot00000000000000Building a Coordinate Reference System ====================================== .. versionadded:: 2.5.0 PROJ strings have the potential to lose much of the information about a coordinate reference system (CRS). More information: https://proj.org/faq.html#what-is-the-best-format-for-describing-coordinate-reference-systems However, PROJ strings make it really simple to construct a CRS. This addition is meant to simplify the process of transitioning from the PROJ form of the string to the WKT form WKT. The CRS classes can be used in the :meth:`pyproj.transformer.Transformer.from_crs` method just like the :class:`pyproj.crs.CRS` class. The current set of classes does not cover every possible use case, but hopefully it is enough to get you started. If you notice something is missing that you need, feel free to open an issue on GitHub. Here are links to the API docs for the pieces you need to get started: - :ref:`crs` - :ref:`coordinate_operation` - :ref:`datum` - :ref:`coordinate_system` Geographic CRS -------------- This is a simple example of creating a lonlat projection. PROJ string:: +proj=longlat +datum=WGS84 +no_defs .. code-block:: python from pyproj.crs import GeographicCRS geog_crs = GeographicCRS() geog_wkt = geog_crs.to_wkt() This example is meant to show off different initialization methods. It can be simplified to not use the Ellipsoid or PrimeMeridian objects. PROJ string:: +proj=longlat +ellps=airy +pm=lisbon +no_defs .. code-block:: python from pyproj.crs import Ellipsoid, GeographicCRS, PrimeMeridian from pyproj.crs.datum import CustomDatum cd = CustomDatum( ellipsoid=Ellipsoid.from_epsg(7001), prime_meridian=PrimeMeridian.from_name("Lisbon"), ) geog_crs = GeographicCRS(datum=cd) geog_wkt = geog_crs.to_wkt() Projected CRS ------------- Simple example using defaults. PROJ string:: +proj=aea +lat_0=0 +lon_0=0 +lat_1=0 +lat_2=0 +x_0=0 +y_0=0 +datum=WGS84 +units=m +no_defs .. code-block:: python from pyproj.crs import ProjectedCRS from pyproj.crs.coordinate_operation import AlbersEqualAreaConversion aeaop = AlbersEqualAreaConversion(0, 0) proj_crs = ProjectedCRS(conversion=aeaop) crs_wkt = proj_crs.to_wkt() More complex example with custom parameters. PROJ string:: +proj=utm +zone=14 +a=6378137 +b=6356752 +pm=lisbon +units=m +no_defs .. code-block:: python from pyproj.crs import GeographicCRS, ProjectedCRS from pyproj.crs.coordinate_operation import UTMConversion from pyproj.crs.datum import CustomDatum, CustomEllipsoid ell = CustomEllipsoid(semi_major_axis=6378137, semi_minor_axis=6356752) cd = CustomDatum(ellipsoid=ell, prime_meridian="Lisbon") proj_crs = ProjectedCRS( conversion=UTMConversion(14), geodetic_crs=GeographicCRS(datum=cd) ) crs_wkt = proj_crs.to_wkt() Bound CRS --------- This is an example building a CRS with `towgs84`. 
PROJ string:: +proj=tmerc +lat_0=0 +lon_0=15 +k=0.9996 +x_0=2520000 +y_0=0 +ellps=intl +towgs84=-122.74,-34.27,-22.83,-1.884,-3.4,-3.03,-15.62 +units=m +no_defs .. code-block:: python from pyproj.crs import BoundCRS, Ellipsoid, GeographicCRS, ProjectedCRS from pyproj.crs.coordinate_operation import ( TransverseMercatorConversion, ToWGS84Transformation, ) from pyproj.crs.datum import CustomDatum import pyproj proj_crs = ProjectedCRS( conversion=TransverseMercatorConversion( latitude_natural_origin=0, longitude_natural_origin=15, false_easting=2520000, false_northing=0, scale_factor_natural_origin=0.9996, ), geodetic_crs=GeographicCRS( datum=CustomDatum(ellipsoid="International 1924 (Hayford 1909, 1910)") ), ) bound_crs = BoundCRS( source_crs=proj_crs, target_crs="WGS 84", transformation=ToWGS84Transformation( proj_crs.geodetic_crs, -122.74, -34.27, -22.83, -1.884, -3.4, -3.03, -15.62 ), ) crs_wkt = bound_crs.to_wkt() Compound CRS ------------- The PROJ string is quite lossy in this example, so it is not provided. .. code-block:: python from pyproj.crs import CompoundCRS, GeographicCRS, ProjectedCRS, VerticalCRS from pyproj.crs.coordinate_system import Cartesian2DCS, VerticalCS from pyproj.crs.coordinate_operation import LambertConformalConic2SPConversion vertcrs = VerticalCRS( name="NAVD88 height", datum="North American Vertical Datum 1988", vertical_cs=VerticalCS(), geoid_model="GEOID12B", ) projcrs = ProjectedCRS( name="NAD83 / Pennsylvania South", conversion=LambertConformalConic2SPConversion( latitude_false_origin=39.3333333333333, longitude_false_origin=-77.75, latitude_first_parallel=40.9666666666667, latitude_second_parallel=39.9333333333333, easting_false_origin=600000, northing_false_origin=0, ), geodetic_crs=GeographicCRS(datum="North American Datum 1983"), cartesian_cs=Cartesian2DCS(), ) compcrs = CompoundCRS( name="NAD83 / Pennsylvania South + NAVD88 height", components=[projcrs, vertcrs] ) crs_wkt = compcrs.to_wkt() pyproj-3.7.1/docs/build_crs_cf.rst000066400000000000000000000131551475425760300171670ustar00rootroot00000000000000.. _build_crs_cf: Managing CRS to and from CF ============================ http://cfconventions.org/cf-conventions/cf-conventions.html Exporting CRS to CF -------------------- When exporting a CRS to the Climate and Forecast (CF) conventions, you need both the grid mapping as well as the coordinate system. If you don't use the coordinate system, then you will lose the units of your projection. In this example, this is the CRS we will use: .. code-block:: python from pyproj import CRS crs = CRS("EPSG:4326") To get the grid mapping you use :meth:`pyproj.crs.CRS.to_cf`: .. versionadded:: 2.2.0 .. code-block:: python cf_grid_mapping = crs.to_cf() Contents of `cf_grid_mapping`:: {'crs_wkt': 'GEOGCRS["WGS 84",DATUM["World Geodetic System ' ....,ID["EPSG",4326]]', 'geographic_crs_name': 'WGS 84', 'grid_mapping_name': 'latitude_longitude', 'inverse_flattening': 298.257223563, 'longitude_of_prime_meridian': 0.0, 'prime_meridian_name': 'Greenwich', 'reference_ellipsoid_name': 'WGS 84', 'semi_major_axis': 6378137.0, 'semi_minor_axis': 6356752.314245179} To get the coordinate system, you use :meth:`pyproj.crs.CRS.cs_to_cf`: .. versionadded:: 3.0.0 .. 
code-block:: python cf_coordinate_system = crs.cs_to_cf() Contents of `cf_coordinate_system`:: [{'long_name': 'geodetic latitude coordinate', 'standard_name': 'latitude', 'units': 'degrees_north', 'axis': 'Y'}, {'long_name': 'geodetic longitude coordinate', 'standard_name': 'longitude', 'units': 'degrees_east', 'axis': 'X'}] Importing CRS from CF ---------------------- When importing a CRS from the Climate and Forecast (CF) conventions, you need both the grid mapping as well as the coordinate system. If you don't use the coordinate system, then you will lose the units of your projection. .. note:: If the CF `crs_wkt` attribute is available, the coordinate system is inside of the WKT and can be used to create the CRS in a single step. .. warning:: If building from grid mapping, be mindful of the axis order. https://github.com/cf-convention/cf-conventions/pull/224 Build the CRS from CF grid mapping: ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ In this example, this is the grid mapping and coordinate system we will use:: variables: double x(x) ; x:standard_name = "projection_x_coordinate" ; x:long_name = "Easting" ; x:units = "m" ; double y(y) ; y:standard_name = "projection_y_coordinate" ; y:long_name = "Northing" ; y:units = "m" ; int crsOSGB ; crsOSGB:grid_mapping_name = "transverse_mercator"; crsOSGB:semi_major_axis = 6377563.396 ; crsOSGB:inverse_flattening = 299.3249646 ; crsOSGB:longitude_of_prime_meridian = 0.0 ; crsOSGB:latitude_of_projection_origin = 49.0 ; crsOSGB:longitude_of_central_meridian = -2.0 ; crsOSGB:scale_factor_at_central_meridian = 0.9996012717 ; crsOSGB:false_easting = 400000.0 ; crsOSGB:false_northing = -100000.0 ; .. note:: If the units are meters as in this example, then no further changes are necessary. .. code-block:: python from pyproj import CRS crs = CRS.from_cf( { "grid_mapping_name": "transverse_mercator", "semi_major_axis": 6377563.396, "inverse_flattening": 299.3249646, "longitude_of_prime_meridian": 0.0, "latitude_of_projection_origin": 49.0, "longitude_of_central_meridian": -2.0, "scale_factor_at_central_meridian": 0.9996012717, "false_easting": 400000.0, "false_northing": -100000.0, } ) Modify the CRS with coordinate system: ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ .. versionadded:: 3.0.0 .. note:: If the CF `crs_wkt` attribute is available, the coordinate system is inside of the WKT and can be used to create the CRS in a single step. .. warning:: Be mindful of the axis order. https://github.com/cf-convention/cf-conventions/pull/224 In this example, assume everything is the same as above. However, the units are instead `US_Survey_Foot`:: variables: double x(x) ; x:standard_name = "projection_x_coordinate" ; x:long_name = "Easting" ; x:units = "US_Survey_Foot" ; double y(y) ; y:standard_name = "projection_y_coordinate" ; y:long_name = "Northing" ; y:units = "US_Survey_Foot" ; ... In this case, you will need to get the unit conversion factor: https://github.com/SciTools/cf-units .. 
code-block:: python from cf_units import Unit from pyproj import CRS cf_unit = Unit("US_Survey_Foot") unit = { "type": "LinearUnit", "name": "US Survey Foot", "conversion_factor": cf_unit.convert(1, "m"), } cartesian_cs = { "$schema": "https://proj.org/schemas/v0.2/projjson.schema.json", "type": "CoordinateSystem", "subtype": "Cartesian", "axis": [ {"name": "Easting", "abbreviation": "E", "direction": "east", "unit": unit}, {"name": "Northing", "abbreviation": "N", "direction": "north", "unit": unit}, ], } crs = CRS.from_cf( { "grid_mapping_name": "transverse_mercator", "semi_major_axis": 6377563.396, "inverse_flattening": 299.3249646, "longitude_of_prime_meridian": 0.0, "latitude_of_projection_origin": 49.0, "longitude_of_central_meridian": -2.0, "scale_factor_at_central_meridian": 0.9996012717, "false_easting": 400000.0, "false_northing": -100000.0, }, cartesian_cs=cartesian_cs, ) pyproj-3.7.1/docs/cli.rst000066400000000000000000000001111475425760300153040ustar00rootroot00000000000000CLI ==== .. argparse:: :ref: pyproj.__main__.parser :prog: pyproj pyproj-3.7.1/docs/conf.py000077500000000000000000000037571475425760300153300ustar00rootroot00000000000000import importlib.metadata import os # Sphinx extensions extensions = [ "sphinx.ext.autodoc", "sphinx.ext.viewcode", "sphinx.ext.napoleon", "sphinx.ext.intersphinx", "sphinxarg.ext", ] intersphinx_mapping = { "numpy": ("https://numpy.org/doc/stable/", None), "pandas": ("https://pandas.pydata.org/docs/", None), "proj": ("https://proj.org/", None), "python": ("https://docs.python.org/3", None), "shapely": ("https://shapely.readthedocs.io/en/stable/", None), "xarray": ("https://docs.xarray.dev/en/stable/", None), } # Add any paths that contain templates here, relative to this directory. templates_path = ["_templates"] # The suffix(es) of source filenames. # You can specify multiple suffix as a list of string: # # source_suffix = ['.rst', '.md'] source_suffix = ".rst" # The master toctree document. master_doc = "index" # General information about the project. project = "pyproj" copyright = "2006-2018, Jeffrey Whitaker; 2019-2024, Open source contributors" author = "Jeffrey Whitaker" version = release = importlib.metadata.version("pyproj") exclude_patterns = ["_build", "Thumbs.db", ".DS_Store", "**.ipynb_checkpoints"] # The name of the Pygments (syntax highlighting) style to use. pygments_style = "material" # If true, `todo` and `todoList` produce output, else they produce nothing. todo_include_todos = False # -- Options for HTML output ------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. # html_theme = os.getenv("PYPROJ_HTML_THEME", "furo") html_logo = "media/logo.png" html_favicon = "media/icon.png" # Add any paths that contain custom static files (such as style sheets) here, # html_static_path = ["_static"] # -- Options for manual page output ------------------------------------ # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). man_pages = [("cli", "pyproj", "pyproj CLI", [author], 1)] pyproj-3.7.1/docs/crs_compatibility.rst000066400000000000000000000165201475425760300202700ustar00rootroot00000000000000CRS Compatibility Guide for Geospatial Python ============================================== This is meant to be a guide to help you along the way of you use :class:`pyproj.crs.CRS` with other Python Geospatial libraries. .. 
.. note:: WKT2 is the best format for storing your CRS according to the `PROJ FAQ `__. osgeo/gdal ---------- https://github.com/osgeo/gdal Converting from `osgeo.osr.SpatialReference` to `pyproj.crs.CRS` ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ .. code-block:: python import osgeo from osgeo.osr import SpatialReference from pyproj.crs import CRS osr_crs = SpatialReference() osr_crs.ImportFromEPSG(4326) if osgeo.version_info.major < 3: proj_crs = CRS.from_wkt(osr_crs.ExportToWkt()) else: proj_crs = CRS.from_wkt(osr_crs.ExportToWkt(["FORMAT=WKT2_2018"])) Converting from `pyproj.crs.CRS` to `osgeo.osr.SpatialReference` ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ .. warning:: WKT2 is only supported in GDAL 3+ .. code-block:: python import osgeo from osgeo.osr import SpatialReference from pyproj.crs import CRS from pyproj.enums import WktVersion proj_crs = CRS.from_epsg(4326) osr_crs = SpatialReference() if osgeo.version_info.major < 3: osr_crs.ImportFromWkt(proj_crs.to_wkt(WktVersion.WKT1_GDAL)) else: osr_crs.ImportFromWkt(proj_crs.to_wkt()) rasterio -------- https://github.com/mapbox/rasterio Converting from `rasterio.crs.CRS` to `pyproj.crs.CRS` ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ If you have `rasterio >= 1.0.14`, then you can pass in the `rasterio.crs.CRS` directly:: import rasterio import rasterio.crs from pyproj.crs import CRS with rasterio.Env(OSR_WKT_FORMAT="WKT2_2018"): rio_crs = rasterio.crs.CRS.from_epsg(4326) proj_crs = CRS.from_user_input(rio_crs) Otherwise, you should use the `wkt` property:: import rasterio.crs from pyproj.crs import CRS with rasterio.Env(OSR_WKT_FORMAT="WKT2_2018"): rio_crs = rasterio.crs.CRS.from_epsg(4326) proj_crs = CRS.from_wkt(rio_crs.wkt) Converting from `pyproj.crs.CRS` to `rasterio.crs.CRS` ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ .. warning:: WKT2 is only supported in GDAL 3+ If you have rasterio >= 1.0.26 and GDAL 3+, then you can pass in the `pyproj.crs.CRS` directly:: import rasterio.crs from pyproj.crs import CRS proj_crs = CRS.from_epsg(4326) rio_crs = rasterio.crs.CRS.from_user_input(proj_crs) If you want to be compatible across GDAL/rasterio versions, you can do:: from packaging import version import rasterio import rasterio.crs from pyproj.crs import CRS from pyproj.enums import WktVersion proj_crs = CRS.from_epsg(4326) if version.parse(rasterio.__gdal_version__) < version.parse("3.0.0"): rio_crs = rasterio.crs.CRS.from_wkt(proj_crs.to_wkt(WktVersion.WKT1_GDAL)) else: rio_crs = rasterio.crs.CRS.from_wkt(proj_crs.to_wkt()) fiona ------ https://github.com/Toblerity/Fiona Converting from `fiona` CRS to `pyproj.crs.CRS` ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Fiona currently stores the CRS as a PROJ string dictionary in the `crs` attribute. As such, it is best to use the `crs_wkt` attribute. It is also useful to know that plans exist to add a CRS class. Related GitHub issue `here `__. Example:: import fiona from pyproj.crs import CRS with fiona.Env(OSR_WKT_FORMAT="WKT2_2018"), fiona.open(...) as fds: proj_crs = CRS.from_wkt(fds.crs_wkt) Converting from `pyproj.crs.CRS` for `fiona` ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ .. warning:: WKT2 is only supported in GDAL 3+
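If you know you are running GDAL 3+, you can hand the default WKT2 output to fiona directly (a minimal sketch; the output path and the elided `fiona.open` arguments are placeholders):

.. code-block:: python

    import fiona
    from pyproj.crs import CRS

    proj_crs = CRS.from_epsg(4326)
    # GDAL 3+ understands WKT2, so no version-specific handling is needed
    # with fiona.open("output.shp", "w", crs_wkt=proj_crs.to_wkt(), ...) as fds:
    #     ...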
If you want to be compatible across GDAL versions, you can do:: from packaging import version import fiona from pyproj.crs import CRS from pyproj.enums import WktVersion proj_crs = CRS.from_epsg(4326) if version.parse(fiona.__gdal_version__) < version.parse("3.0.0"): fio_crs = proj_crs.to_wkt(WktVersion.WKT1_GDAL) else: # GDAL 3+ can use WKT2 fio_crs = proj_crs.to_wkt() # with fiona.open(..., "w", crs_wkt=fio_crs) as fds: # ... geopandas --------- https://github.com/geopandas/geopandas Also see the `geopandas guide for upgrading to use pyproj CRS class `__ Preparing `pyproj.crs.CRS` for `geopandas` ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ .. code-block:: python from packaging import version import fiona import geopandas from pyproj.crs import CRS from pyproj.enums import WktVersion proj_crs = CRS.from_epsg(4326) if version.parse(geopandas.__version__) >= version.parse("0.7.0"): # geopandas uses pyproj.crs.CRS geo_crs = proj_crs elif version.parse(geopandas.__version__) >= version.parse("0.6.0"): # this version of geopandas uses always_xy=True so WKT version is safe if version.parse(fiona.__gdal_version__) < version.parse("3.0.0"): geo_crs = proj_crs.to_wkt(WktVersion.WKT1_GDAL) else: # GDAL 3+ can use WKT2 geo_crs = proj_crs.to_wkt() else: geo_crs = proj_crs.to_proj4() `geopandas` to `pyproj.crs.CRS` ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ :meth:`pyproj.crs.CRS.from_user_input` can handle anything across the `geopandas` versions. The only gotcha would be if it is `None`. .. code-block:: python import geopandas from pyproj.crs import CRS gdf = geopandas.read_file(...) proj_crs = CRS.from_user_input(gdf.crs) cartopy ------- https://github.com/SciTools/cartopy .. note:: These examples require cartopy 0.20+ Preparing `pyproj.crs.CRS` for `cartopy.crs.CRS` ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ .. warning:: This only works for CRS created with WKT2, PROJ JSON, or a spatial reference ID (i.e. EPSG) with the area of use defined. Otherwise, the x_limits and y_limits will not work. .. code-block:: python import cartopy.crs as ccrs from pyproj.crs import CRS # geographic proj_crs = CRS.from_epsg(4326) cart_crs = ccrs.CRS(proj_crs) # projected proj_crs = CRS.from_epsg(6933) cart_crs = ccrs.Projection(proj_crs) Preparing `cartopy.crs.CRS` for `pyproj.crs.CRS` ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ .. note:: `cartopy.crs.CRS` inherits from `pyproj.crs.CRS`, so it should behave like a `pyproj.crs.CRS`. .. code-block:: python from cartopy.crs import PlateCarree from pyproj.crs import CRS cart_crs = PlateCarree() proj_crs = CRS.from_user_input(cart_crs) pycrs ----- https://github.com/karimbahgat/PyCRS .. warning:: Currently does not support WKT2 Preparing `pyproj.crs.CRS` for `pycrs` ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ .. code-block:: python import pycrs from pyproj.crs import CRS proj_crs = CRS.from_epsg(4326) py_crs = pycrs.parse.from_ogc_wkt(proj_crs.to_wkt("WKT1_GDAL")) Preparing `pycrs` for `pyproj.crs.CRS` ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ .. code-block:: python import pycrs from pyproj.crs import CRS py_crs = pycrs.parse.from_epsg_code(4326) proj_crs = CRS.from_wkt(py_crs.to_ogc_wkt()) pyproj-3.7.1/docs/examples.rst000066400000000000000000000365261475425760300163740ustar00rootroot00000000000000.. _examples: Getting Started =============== There are examples of usage within the API documentation and tests. This section is to demonstrate recommended usage. Also see: :ref:`gotchas` Using CRS --------- For more usage examples and documentation see :class:`pyproj.crs.CRS`.
Initializing CRS ~~~~~~~~~~~~~~~~ The :class:`pyproj.crs.CRS` class can be initialized in many different ways. Here are some examples of initialization. .. code:: python >>> from pyproj import CRS >>> crs = CRS.from_epsg(4326) >>> crs = CRS.from_string("EPSG:4326") >>> crs = CRS.from_proj4("+proj=latlon") >>> crs = CRS.from_user_input(4326) Converting CRS to a different format ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ .. warning:: You will likely lose important projection information when converting to a PROJ string from another format. See: https://proj4.org/faq.html#what-is-the-best-format-for-describing-coordinate-reference-systems .. code:: python >>> from pyproj import CRS >>> crs = CRS.from_epsg(4326) >>> crs.to_epsg() 4326 >>> crs.to_authority() ('EPSG', '4326') >>> crs = CRS.from_proj4("+proj=omerc +lat_0=-36 +lonc=147 +alpha=-54 +k=1 +x_0=0 +y_0=0 +gamma=0 +ellps=WGS84 +towgs84=0,0,0,0,0,0,0") >>> crs Name: unknown Axis Info [cartesian]: - E[east]: Easting (metre) - N[north]: Northing (metre) Area of Use: - undefined Coordinate Operation: - name: Transformation from unknown to WGS84 - method: Position Vector transformation (geog2D domain) Datum: Unknown based on WGS84 ellipsoid - Ellipsoid: WGS 84 - Prime Meridian: Greenwich Source CRS: unknown >>> print(crs.to_wkt(pretty=True)) BOUNDCRS[ SOURCECRS[ PROJCRS["unknown", BASEGEOGCRS["unknown", DATUM["Unknown based on WGS84 ellipsoid", ELLIPSOID["WGS 84",6378137,298.257223563, LENGTHUNIT["metre",1], ID["EPSG",7030]]], ... PARAMETER["Z-axis rotation",0, ID["EPSG",8610]], PARAMETER["Scale difference",1, ID["EPSG",8611]]]] >>> from pyproj.enums import WktVersion >>> print(crs.to_wkt(WktVersion.WKT1_GDAL, pretty=True)) PROJCS["unknown", GEOGCS["unknown", DATUM["Unknown_based_on_WGS84_ellipsoid", SPHEROID["WGS 84",6378137,298.257223563, AUTHORITY["EPSG","7030"]], TOWGS84[0,0,0,0,0,0,0]], PRIMEM["Greenwich",0, AUTHORITY["EPSG","8901"]], UNIT["degree",0.0174532925199433, AUTHORITY["EPSG","9122"]]], PROJECTION["Hotine_Oblique_Mercator_Azimuth_Center"], PARAMETER["latitude_of_center",-36], PARAMETER["longitude_of_center",147], PARAMETER["azimuth",-54], PARAMETER["rectified_grid_angle",0], PARAMETER["scale_factor",1], PARAMETER["false_easting",0], PARAMETER["false_northing",0], UNIT["metre",1, AUTHORITY["EPSG","9001"]], AXIS["Easting",EAST], AXIS["Northing",NORTH]] >>> from pprint import pprint >>> pprint(crs.to_cf()) {'azimuth_of_central_line': -54, 'crs_wkt': 'BOUNDCRS[SOURCECRS[PROJCRS["unknown",BASEGEOGCRS["unknown",DATUM["Unknown ' ... 'difference",1,ID["EPSG",8611]]]]', 'false_easting': 0.0, 'false_northing': 0.0, 'grid_mapping_name': 'oblique_mercator', 'horizontal_datum_name': 'Unknown based on WGS84 ellipsoid', 'inverse_flattening': 298.257223563, 'latitude_of_projection_origin': -36.0, 'longitude_of_prime_meridian': 0.0, 'longitude_of_projection_origin': 147.0, 'prime_meridian_name': 'Greenwich', 'reference_ellipsoid_name': 'WGS 84', 'scale_factor_at_projection_origin': 1.0, 'semi_major_axis': 6378137.0, 'semi_minor_axis': 6356752.314245179, 'towgs84': [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]} Extracting attributes from CRS ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ There are many attributes you can pull from the :class:`pyproj.crs.CRS`. This is just a small subset of what is available. .. 
code:: python >>> crs = CRS("urn:ogc:def:crs,crs:EPSG::2393,crs:EPSG::5717") >>> crs Name: KKJ / Finland Uniform Coordinate System + N60 height Axis Info [cartesian|vertical]: - X[north]: Northing (metre) - Y[east]: Easting (metre) - H[up]: Gravity-related height (metre) Area of Use: - undefined Datum: Kartastokoordinaattijarjestelma (1966) - Ellipsoid: International 1924 - Prime Meridian: Greenwich Sub CRS: - KKJ / Finland Uniform Coordinate System - N60 height >>> crs.sub_crs_list [ Name: KKJ / Finland Uniform Coordinate System Axis Info [cartesian]: - X[north]: Northing (metre) - Y[east]: Easting (metre) Area of Use: - name: Finland - 25.5°E to 28.5°E onshore. Also all country. - bounds: (19.24, 59.75, 31.59, 70.09) Coordinate Operation: - name: Finland Uniform Coordinate System - method: Transverse Mercator Datum: Kartastokoordinaattijarjestelma (1966) - Ellipsoid: International 1924 - Prime Meridian: Greenwich , Name: N60 height Axis Info [vertical]: - H[up]: Gravity-related height (metre) Area of Use: - name: Finland - onshore. - bounds: (19.24, 59.75, 31.59, 70.09) Datum: Helsinki 1960 - Ellipsoid: undefined - Prime Meridian: undefined ] >>> cop = crs.sub_crs_list[0].coordinate_operation >>> print(cop.to_wkt(pretty=True)) CONVERSION["Finland Uniform Coordinate System", METHOD["Transverse Mercator", ID["EPSG",9807]], PARAMETER["Latitude of natural origin",0, ANGLEUNIT["degree",0.0174532925199433], ID["EPSG",8801]], PARAMETER["Longitude of natural origin",27, ANGLEUNIT["degree",0.0174532925199433], ID["EPSG",8802]], PARAMETER["Scale factor at natural origin",1, SCALEUNIT["unity",1], ID["EPSG",8805]], PARAMETER["False easting",3500000, LENGTHUNIT["metre",1], ID["EPSG",8806]], PARAMETER["False northing",0, LENGTHUNIT["metre",1], ID["EPSG",8807]]] >>> cop.method_code '9807' >>> cop.method_name 'Transverse Mercator' >>> cop.params [Param(name=Latitude of natural origin, auth_name=EPSG, code=8801, value=0.0, unit_name=degree, unit_auth_name=, unit_code=, unit_category=angular), ... Param(name=False northing, auth_name=EPSG, code=8807, value=0.0, unit_name=metre, unit_auth_name=, unit_code=, unit_category=linear)] Find UTM CRS by Latitude and Longitude --------------------------------------- .. note:: For more database methods see: :ref:`database`. .. code-block:: python from pyproj import CRS from pyproj.aoi import AreaOfInterest from pyproj.database import query_utm_crs_info utm_crs_list = query_utm_crs_info( datum_name="WGS 84", area_of_interest=AreaOfInterest( west_lon_degree=-93.581543, south_lat_degree=42.032974, east_lon_degree=-93.581543, north_lat_degree=42.032974, ), ) utm_crs = CRS.from_epsg(utm_crs_list[0].code) Transformations from CRS to CRS ------------------------------- Step 1: Inspect CRS definition to ensure proper area of use and axis order ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ For more options available for inspection, usage examples, and documentation see :class:`pyproj.crs.CRS`. .. 
code:: python >>> from pyproj import CRS >>> crs_4326 = CRS.from_epsg(4326) >>> crs_4326 Name: WGS 84 Axis Info [ellipsoidal]: - Lat[north]: Geodetic latitude (degree) - Lon[east]: Geodetic longitude (degree) Area of Use: - name: World - bounds: (-180.0, -90.0, 180.0, 90.0) Datum: World Geodetic System 1984 - Ellipsoid: WGS 84 - Prime Meridian: Greenwich >>> crs_26917 = CRS.from_epsg(26917) >>> crs_26917 Name: NAD83 / UTM zone 17N Axis Info [cartesian]: - E[east]: Easting (metre) - N[north]: Northing (metre) Area of Use: - name: North America - 84°W to 78°W and NAD83 by country - bounds: (-84.0, 23.81, -78.0, 84.0) Coordinate Operation: - name: UTM zone 17N - method: Transverse Mercator Datum: North American Datum 1983 - Ellipsoid: GRS 1980 - Prime Meridian: Greenwich Note that `crs_4326` has the latitude (north) axis first and the `crs_26917` has the easting axis first. This means that in the transformation, we will need to input the data with latitude first and longitude second. Also, note that the second projection is a UTM projection with bounds (-84.0, 23.81, -78.0, 84.0) which are in the form (min_x, min_y, max_x, max_y), so the transformation input/output should be within those bounds for best results. Step 2: Create Transformer to convert from CRS to CRS ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ The :class:`pyproj.transformer.Transformer` can be initialized with anything supported by :meth:`pyproj.crs.CRS.from_user_input`. There are a couple of examples added here for demonstration. For more usage examples and documentation, see :class:`pyproj.transformer.Transformer`. .. code:: python >>> from pyproj import Transformer >>> transformer = Transformer.from_crs(crs_4326, crs_26917) >>> transformer = Transformer.from_crs(4326, 26917) >>> transformer = Transformer.from_crs("EPSG:4326", "EPSG:26917") >>> transformer Inverse of NAD83 to WGS 84 (1) + UTM zone 17N >>> transformer.transform(50, -80) (571666.4475041276, 5539109.815175673) If you prefer to always have the axis order in the x,y or lon,lat order, you can use the `always_xy` option when creating the transformer. .. code:: python >>> from pyproj import Transformer >>> transformer = Transformer.from_crs("EPSG:4326", "EPSG:26917", always_xy=True) >>> transformer.transform(-80, 50) (571666.4475041276, 5539109.815175673) Converting between geographic and projection coordinates within one datum ------------------------------------------------------------------------- Step 1: Retrieve the geodetic CRS based on original CRS ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ .. code:: python >>> from pyproj import CRS >>> crs = CRS.from_epsg(3857) >>> crs Name: WGS 84 / Pseudo-Mercator Axis Info [cartesian]: - X[east]: Easting (metre) - Y[north]: Northing (metre) Area of Use: - name: World - 85°S to 85°N - bounds: (-180.0, -85.06, 180.0, 85.06) Coordinate Operation: - name: Popular Visualisation Pseudo-Mercator - method: Popular Visualisation Pseudo Mercator Datum: World Geodetic System 1984 - Ellipsoid: WGS 84 - Prime Meridian: Greenwich >>> crs.geodetic_crs Name: WGS 84 Axis Info [ellipsoidal]: - Lat[north]: Geodetic latitude (degree) - Lon[east]: Geodetic longitude (degree) Area of Use: - name: World - bounds: (-180.0, -90.0, 180.0, 90.0) Datum: World Geodetic System 1984 - Ellipsoid: WGS 84 - Prime Meridian: Greenwich Step 2: Create Transformer to convert from geodetic CRS to CRS ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ .. 
code:: python >>> proj = Transformer.from_crs(crs.geodetic_crs, crs) >>> proj Popular Visualisation Pseudo-Mercator Area of Use: - name: World - bounds: (-180.0, -90.0, 180.0, 90.0) >>> proj.transform(12, 15) (1669792.3618991035, 1345708.4084091093) 4D Transformations with Time ---------------------------- .. note:: If you are doing a transformation with a CRS that is time based, it is recommended to include the time in the transformation operation. .. code:: python >>> transformer = Transformer.from_crs(7789, 8401) >>> transformer ITRF2014 to ETRF2014 (1) >>> transformer.transform(xx=3496737.2679, yy=743254.4507, zz=5264462.9620, tt=2019.0) (3496737.757717311, 743253.9940103051, 5264462.701132784, 2019.0) Geodesic calculations --------------------- This is useful if you need to calculate the distance between two points or the area of a geometry on Earth's surface. For more examples of usage and documentation, see :class:`pyproj.Geod`. Creating Geod class ~~~~~~~~~~~~~~~~~~~ This example demonstrates creating a :class:`pyproj.Geod` using an ellipsoid name as well as deriving one using a :class:`pyproj.crs.CRS`. .. code:: python >>> from pyproj import CRS, Geod >>> geod_clrk = Geod(ellps='clrk66') # Use Clarke 1866 ellipsoid. >>> geod_clrk Geod(ellps='clrk66') >>> geod_wgs84 = CRS("EPSG:4326").get_geod() >>> geod_wgs84 Geod('+a=6378137 +f=0.0033528106647475126') Geodesic line length ~~~~~~~~~~~~~~~~~~~~ Calculate the geodesic length of a line (See: :meth:`pyproj.Geod.line_length`): .. code:: python >>> from pyproj import Geod >>> lats = [-72.9, -71.9, -74.9, -74.3, -77.5, -77.4, -71.7, -65.9, -65.7, ... -66.6, -66.9, -69.8, -70.0, -71.0, -77.3, -77.9, -74.7] >>> lons = [-74, -102, -102, -131, -163, 163, 172, 140, 113, ... 88, 59, 25, -4, -14, -33, -46, -61] >>> geod = Geod(ellps="WGS84") >>> total_length = geod.line_length(lons, lats) >>> f"{total_length:.3f}" '14259605.611' Calculate the geodesic length of a shapely geometry (See: :meth:`pyproj.Geod.geometry_length`): .. code:: python >>> from pyproj import Geod >>> from shapely.geometry import Point, LineString >>> line_string = LineString([Point(1, 2), Point(3, 4)]) >>> geod = Geod(ellps="WGS84") >>> total_length = geod.geometry_length(line_string) >>> f"{total_length:.3f}" '313588.397' Geodesic area ~~~~~~~~~~~~~ Calculate the geodesic area and perimeter of a polygon (See: :meth:`pyproj.Geod.polygon_area_perimeter`): .. code:: python >>> from pyproj import Geod >>> geod = Geod('+a=6378137 +f=0.0033528106647475126') >>> lats = [-72.9, -71.9, -74.9, -74.3, -77.5, -77.4, -71.7, -65.9, -65.7, ... -66.6, -66.9, -69.8, -70.0, -71.0, -77.3, -77.9, -74.7] >>> lons = [-74, -102, -102, -131, -163, 163, 172, 140, 113, ... 88, 59, 25, -4, -14, -33, -46, -61] >>> poly_area, poly_perimeter = geod.polygon_area_perimeter(lons, lats) >>> f"{poly_area:.3f} {poly_perimeter:.3f}" '13376856682207.406 14710425.407' Calculate the geodesic area and perimeter of a shapely polygon (See: :meth:`pyproj.Geod.geometry_area_perimeter`): ..
code:: python >>> from pyproj import Geod >>> from shapely.geometry import LineString, Point, Polygon >>> geod = Geod('+a=6378137 +f=0.0033528106647475126') >>> poly_area, poly_perimeter = geod.geometry_area_perimeter( Polygon( LineString([Point(1, 1), Point(1, 10), Point(10, 10), Point(10, 1)]), holes=[LineString([Point(1, 2), Point(3, 4), Point(5, 2)])], ) ) >>> f"{poly_area:.3f} {poly_perimeter:.3f}" '-944373881400.339 3979008.036' pyproj-3.7.1/docs/gotchas.rst000066400000000000000000000223651475425760300162040ustar00rootroot00000000000000.. _gotchas: Gotchas/FAQ =========== This is a page for some suggestions, gotchas, and FAQs. Also see: - :ref:`examples` - :ref:`PROJ FAQ ` What are the best formats to store the CRS information? -------------------------------------------------------- In general, `Well-Known Text (WKT) `__ or `Spatial Reference ID (SRID) `__, such as EPSG codes, are the preferred formats to describe a CRS. .. note:: WKT2 is preferred over WKT1. PROJ strings can be lossy for storing CRS information. If you can avoid it, it is best to not use them. Additionally, PROJ strings will likely not be supported in future major versions of PROJ for storing CRS information. More info: https://proj.org/faq.html#what-is-the-best-format-for-describing-coordinate-reference-systems Axis order changes in PROJ 6+ ----------------------------- - https://proj.org/faq.html#why-is-the-axis-ordering-in-proj-not-consistent - See warning at the top of :ref:`transformer` - Examples of how to handle it: :ref:`examples` - :ref:`min_confidence` `+init=<authority>:<code>` should be replaced with `<authority>:<code>` ------------------------------------------------------------------------ The `+init=<authority>:<code>` syntax is deprecated and will be removed in future versions of PROJ. Also, if you use the `+init` syntax, you may have problems initializing projections when the other syntax works. .. code-block:: python >>> from pyproj import CRS >>> CRS("ESRI:54009") Name: World_Mollweide Axis Info [cartesian]: - E[east]: Easting (metre) - N[north]: Northing (metre) Area of Use: - name: World - bounds: (-180.0, -90.0, 180.0, 90.0) Coordinate Operation: - name: World_Mollweide - method: Mollweide Datum: World Geodetic System 1984 - Ellipsoid: WGS 84 - Prime Meridian: Greenwich >>> CRS("+init=ESRI:54009") ... pyproj.exceptions.CRSError: Invalid projection: +init=ESRI:54009 +type=crs: (Internal Proj Error: proj_create: cannot expand +init=ESRI:54009 +type=crs) Proj (Not a generic latitude/longitude to projection converter) --------------------------------------------------------------- :class:`pyproj.Proj` is limited to converting between geographic and projection coordinates within one datum. If you have coordinates in latitude and longitude, and you want to convert them to your projection, it is recommended to use the :class:`pyproj.transformer.Transformer` as it takes into account datum shifts. You likely want to start from `EPSG:4326` (WGS84) for coordinates as latitude and longitude. .. code-block:: python >>> from pyproj import CRS >>> crs_4326 = CRS("WGS84") >>> crs_4326 Name: WGS 84 Axis Info [ellipsoidal]: - Lat[north]: Geodetic latitude (degree) - Lon[east]: Geodetic longitude (degree) Area of Use: - name: World - bounds: (-180.0, -90.0, 180.0, 90.0) Datum: World Geodetic System 1984 - Ellipsoid: WGS 84 - Prime Meridian: Greenwich Then, use the :class:`pyproj.transformer.Transformer` to transform from latitude and longitude to your projection as you might have a projection with a different datum. ..
code-block:: python >>> crs_proj = CRS("EPSG:28992") >>> crs_proj Name: Amersfoort / RD New Axis Info [cartesian]: - X[east]: Easting (metre) - Y[north]: Northing (metre) Area of Use: - name: Netherlands - onshore. - bounds: (3.2, 50.75, 7.22, 53.7) Coordinate Operation: - name: RD New - method: Oblique Stereographic Datum: Amersfoort - Ellipsoid: Bessel 1841 - Prime Meridian: Greenwich >>> crs_proj.datum == crs_4326.datum False >>> from pyproj import Transformer >>> transformer = Transformer.from_crs(crs_4326, crs_proj) >>> transformer.transform(52.067567, 5.068913) (133175.3690698233, 453300.86739169655) If you use :class:`pyproj.Proj`, it will use the geodetic CRS from the projected CRS with the same datum to do the transformation, which may not be what you want. .. code-block:: python >>> from pyproj import Proj >>> Proj('epsg:28992')(5.068913, 52.067567) (133148.22970574044, 453192.24450392975) >>> transg = Transformer.from_crs(crs_proj.geodetic_crs, crs_proj) >>> transg.transform(52.067567, 5.068913) (133148.22970574044, 453192.24450392975) .. _min_confidence: Why is the EPSG code returned when using `EPSG:xxxx` but not with `+init=EPSG:xxxx`? ------------------------------------------------------------------------------------ From: https://gis.stackexchange.com/a/326919/144357 The reason that the EPSG code does not appear with the CRS initialized with the `init=` syntax is that the two CRS objects are different. .. code-block:: python >>> from pyproj import CRS >>> crs_deprecated = CRS(init="epsg:4544") >>> crs = CRS("EPSG:4544") >>> crs == crs_deprecated False Upon further inspection of the `Axis Info` section, you can see that the difference is in the **axis order**. .. code-block:: python >>> crs_deprecated Name: CGCS2000 / 3-degree Gauss-Kruger CM 105E Axis Info [cartesian]: - E[east]: Easting (metre) - N[north]: Northing (metre) Area of Use: - name: China - 103.5°E to 106.5°E - bounds: (103.5, 22.5, 106.5, 42.21) Coordinate Operation: - name: Gauss-Kruger CM 105E - method: Transverse Mercator Datum: China 2000 - Ellipsoid: CGCS2000 - Prime Meridian: Greenwich >>> crs Name: CGCS2000 / 3-degree Gauss-Kruger CM 105E Axis Info [cartesian]: - X[north]: Northing (metre) - Y[east]: Easting (metre) Area of Use: - name: China - 103.5°E to 106.5°E - bounds: (103.5, 22.5, 106.5, 42.21) Coordinate Operation: - name: Gauss-Kruger CM 105E - method: Transverse Mercator Datum: China 2000 - Ellipsoid: CGCS2000 - Prime Meridian: Greenwich The reason the `min_confidence` parameter in :meth:`pyproj.crs.CRS.to_epsg` and :meth:`pyproj.crs.CRS.to_authority` exists is because you can initialize a CRS in several different ways and some of them do not correspond exactly to an EPSG or authority code, even though they may be close. For example, if you have a WKT/PROJ string and you use it to create the CRS instance, in most cases you want to be sure that the EPSG code given by to_epsg will give you a CRS instance similar to the one created by the WKT/PROJ string. However, if no EPSG code matches your WKT/PROJ string within the given `min_confidence`, you don't want a loosely matching code back, as it would make you think that the WKT/PROJ string and the EPSG code are one and the same when they are not. However, if you only want to get the EPSG code that is closest to the PROJ/WKT string, then you can reduce your min_confidence to a threshold you are comfortable with. Here is an example of that: ..
code-block:: python >>> crs_deprecated = CRS("+init=epsg:4326") >>> crs_deprecated.to_epsg(100) >>> crs_deprecated.to_epsg(70) >>> crs_deprecated.to_epsg(20) 4326 >>> crs_latlon = CRS("+proj=latlon") >>> crs_latlon.to_epsg(100) >>> crs_latlon.to_epsg(70) 4326 >>> crs_epsg = CRS.from_epsg(4326) >>> crs_epsg.to_epsg(100) 4326 >>> crs_wkt = CRS(crs_epsg.to_wkt()) >>> crs_wkt.to_epsg(100) 4326 >>> crs_wkt == crs_epsg True >>> crs_epsg == crs_latlon False >>> crs_epsg == crs_deprecated False Internal PROJ Error ... SQLite error on SELECT ---------------------------------------------- The PROJ database is based on the EPSG database. With each release, there is a good chance that there are database updates. If you have multiple versions of PROJ installed on your systems and the search path for the data directory becomes mixed up, you may see an error message like: `SQLite error on SELECT`. This is likely due to a version of PROJ attempting to use an incompatible database. Debugging tips: - To get data directory being used: :func:`pyproj.datadir.get_data_dir` - The order for searching for the data directory can be found in the docstrings of :func:`pyproj.datadir.get_data_dir` - To change the data directory: :func:`pyproj.datadir.set_data_dir` .. _upgrade_transformer: Upgrading to pyproj 2 from pyproj 1 ----------------------------------- We recommended using the :class:`pyproj.transformer.Transformer` and :class:`pyproj.crs.CRS` in place of the :class:`pyproj.Proj` and :meth:`pyproj.transformer.transform`. Also see: - :ref:`examples` - :ref:`optimize_transformations` .. warning:: :meth:`pyproj.transformer.transform` and :meth:`pyproj.transformer.itransform` are deprecated. pyproj 1 style: >>> from functools import partial >>> from pyproj import Proj, transform >>> proj_4326 = Proj(init="epsg:4326") >>> proj_3857 = Proj(init="epsg:3857") >>> transformer = partial(transform, proj_4326, proj_3857) >>> transformer(12, 12) pyproj 2 style: >>> from pyproj import Transformer >>> transformer = Transformer.from_crs("EPSG:4326", "EPSG:3857") >>> transformer.transform(12, 12) pyproj-3.7.1/docs/history.rst000066400000000000000000000655371475425760300162650ustar00rootroot00000000000000Change Log ========== 3.7.1 ------ - WHL: Add wheels for musllinux (pull #1461) - WHL: MacOS minimum deployment target moved to 13 (pull #1475) - WHL: Wheels contain PROJ 9.5.1 (pull #1477) - MNT: Cython 3.1+ fixes (pull #1452) - MNT: Remove use of utcnow() and change to naive datetimes instead (pull #1450) - TST: remove checking is python >= 3.4 (pull #1446) - TST: Add assert statements at the end of tests (pull #1453) - LNT: Setup ruff & lint fixes (pull #1455 #1456) 3.7.0 ------ - WHL: Wheels contain PROJ 9.4.1 (pull #1423) - DEP: Minimum supported Python version 3.10 (pull #1357) - DEP: Minimum PROJ version 9.2 (pull #1394) - ENH: Add :meth:`CRS.is_deprecated` and :meth:`CRS.get_non_deprecated` (pull #1383) - PERF: thread local context (issue #1133) - ENH: Add runtime & compiled PROJ versions (discussion #1420) - BUG: Handle changes to HotineObliqueMercatorBConversion (issue #1429) 3.6.1 ------ - WHL: Wheels contain PROJ 9.3.0 (issue #1327) - BUG: Remove pkg_resources from setup.py (issue #1313) - BUG: Cython 3 compatibility fixes (issue #1321) 3.6.0 ------ - DEP: Minimum supported Python version 3.9 (issue #1111) - WHL: Wheels contain PROJ 9.2.1 (pull #1291) - ENH: Added allow_superseded kwargs to :class:`pyproj.transformer.TransformerGroup` (pull #1269) - ENH: Added :meth:`CRS.to_2d` to demote 3D CRS to 2D (issue #1266) - 
ENH: Added parameter `output_axis_rule` to :meth:`CRS.to_wkt` (pull #1287) - BUG: fix Geod.npts NaN handling (issue #1282) 3.5.0 ------ - DEP: Minimum PROJ version 9.0 (issue #1223) - WHL: PROJ 9.2 in wheels (pull #1243) - ENH: Add `return_back_azimuth: bool` to allow compatibility between the azimuth output of the following functions (issue #1163): `fwd` and `fwd_intermediate`, `inv` and `inv_intermediate`, Note: BREAKING CHANGE for the default value `return_back_azimuth=True` in the functions `fwd_intermediate` and `inv_intermediate` to mach the default value in `fwd` and `inv` - ENH: Added only_best kwarg to :meth:`.Transformer.from_crs` (issue #1228) - PERF: Optimize point transformations (pull #1204) - PERF: Optimize for single point in Geod fwd/inv functions (pull #1206) - REF: Raise error when :meth:`.CRS.to_wkt`, :meth:`.CRS.to_json`, or :meth:`.CRS.to_proj4` returns None (issue #1036) - CLN: Remove `AzumuthalEquidistantConversion` & :class:`LambertAzumuthalEqualAreaConversion`. :class:`AzimuthalEquidistantConversion` & :class:`LambertAzimuthalEqualAreaConversion` should be used instead (pull #1219) - BUG: Fix Derived Projected CRS support (issue #1182) - BUG: Add horizontal_datum_name for geographic CRS in :meth:`.CRS.to_cf` (issue #1251) - BUG: Add datum ensemble support to :class:`.GeographicCRS` (pull #1255) 3.4.1 ----- - WHL: Add win32 to build_wheels matrix (pull #1169) - BUG: Changed so that the setup.cfg depends on the version code in the __init__.py instead of the other way around (issuue #1155) - BUG: Fix :meth:`.CRS.to_cf` for Pole rotation GRIB convention (pull #1167) - BUG: Fix :meth:`.CRS.to_authority` memory leak (pull #1178) - REF: Use upper case EPSG code when creating CRS (pull #1162) 3.4.0 ----- - WHL: Python 3.11 Wheels (issue #1110) - WHL: Wheels contain PROJ 9.1.0 (pull #1132) - DEP: Minimum PROJ version 8.2 (issue #1011) - BUG: Fix transformer list for 3D transformations in :class:`.TransformerGroup` (discussion #1072) - ENH: Added authority, accuracy, and allow_ballpark kwargs to :class:`.TransformerGroup` (pull #1076) - ENH: Added ``force_over`` kwarg to :meth:`.Transformer.from_crs` (issue #997) - ENH: Added :meth:`.Transformer.get_last_used_operation` (issue #1071) - CLN: Remove deprecated ``skip_equivalent`` kwarg from transformers and ``errcheck`` kwarg from :meth:`.CRS.from_cf` (pull #1077) - REF: use regex to process PROJ strings in :meth:`.CRS.to_dict` (pull #1086) - BUG: :class:`.MercatorAConversion` defined only for lat_0 = 0 (issue #1089) - BUG: Add support for `PROJ_DATA` environment variable (issue #1097) - BUG: Ensure numpy masked arrays stay masked after projection (issue #1102) - BLD: Don't specify runtime_library_dirs on Cygwin (pull #1120) - BUG: Fix finding PROJ version with PROJ_LIB and PROJ 9.1+ (issue #1127) 3.3.1 ------- - WHL: Wheels for Linux are manylinux2014 (pip 19.3+) - BUG: Complete database stub file with query_utm_crs_info() signature (issue #1044) - BUG: Reorder deps in show_versions for setuptools issue (issue #1017) - BUG: remove CustomConstructorCRS @abstractmethod decorator (pull #1018) - BUG: Correct type annotation for AreaofUse.bounds (issue #1012) - BUG: :func:`pyproj.datadir.get_data_dir` support for conda Windows (issue #1029) - ENH: warn when :meth:`pyproj.crs.CRS.to_wkt`, :meth:`pyproj.crs.CRS.to_proj4`, or :meth:`pyproj.crs.CRS.to_json()` returns None (issue #1036) - ENH: Added support for int-like strings and numpy dtypes (issues #1026 and #1835) - ENH: Added support to pickle 
:class:`pyproj.transformer.Transformer` (issues #1058) 3.3.0 ------- - WHL: Wheels contain PROJ 8.2.0 - DEP: Minimum supported Python version 3.8 (issue #930) - DEP: Minimum PROJ version 8.0 (issue #940) - BUG: Prepend "Derived" to CRS type name if CRS is derived (issue #932) - BUG: Improved handling of inf values in :meth:`pyproj.transformer.Transformer.transform_bounds` (pull #961) - BUG: CRS CF conversions mismatch of PROJ parameters in rotated pole (issue #948) - ENH: Add support for transforming bounds at the poles in :meth:`pyproj.transformer.Transformer.transform_bounds` (pull #962) - ENH: Added :attr:`pyproj.transformer.Transformer.source_crs` & :attr:`pyproj.transformer.Transformer.target_crs` (pull #976) - ENH: Added :class:`pyproj.crs.coordinate_operation.PoleRotationNetCDFCFConversion` (issue #948) - ENH: Added :func:`pyproj.database.get_database_metadata` (issue #990) - ENH: Added PROJ database metadata to :func:`pyproj.show_versions` (issue #990) 3.2.1 ------ - REF: declare specific python types in cython (pull #928) - REF: Use cython string decoding (pull #929) - BUG: Return multiple authorities with :attr:`pyproj.crs.CRS.list_authority` (pull #943) - BUG: CRS CF conversions ensure lon_0 = north_pole_grid_longitude + 180 (issue #927) - BUG: CRS CF conversions ensure Pole rotation (netCDF CF convention) conversion works (issue #927) 3.2.0 ------ - WHL: Wheels contain PROJ 8.1.1 - DOC: Add new pyproj logo (issue #700) - REF: Handle deprecation of proj_context_set_autoclose_database (issue #866) - REF: Make CRS methods inheritable (issue #847) - ENH: Added :attr:`pyproj.crs.CRS.is_derived` (pull #902) - ENH: Added :attr:`pyproj.crs.GeocentricCRS` (pull #903) - ENH: Added :attr:`pyproj.crs.CRS.list_authority` (issue #918) - ENH: Added `inplace` kwarg to :meth:`pyproj.transformer.Transformer.transform` (issue #906) - PERF: Disable unnecessary copy in dtype conversion for buffer (pull #904) - DOC: Improve FAQ text about CRS formats (issue #789) - BUG: Add PyPy cython array implementation (issue #854) - BUG: Fix spelling for :class:`pyproj.crs.coordinate_operation.AzimuthalEquidistantConversion` and :class:`pyproj.crs.coordinate_operation.LambertAzimuthalEqualAreaConversion` (issue #882) - BUG: Make datum name match exact in :func:`pyproj.database.query_utm_crs_info` (pull #887) - BUG: Update :class:`pyproj.enums.GeodIntermediateFlag` for future Python compatibility (issue #855) - BUG: Hide unnecessary PROJ ERROR from proj_crs_get_coordoperation (issue #873) - BUG: Fix pickling for CRS builder classes (issue #897) - CLN: Remove `ignore_axis_order` kwarg from :meth:`pyproj.crs.CRS.is_exact_same` as it was added by accident (pull #904) - CLN: remove numeric/numarrays support (pull #908) - LNT: Add pylint & address issues (pull #909) - DEP: Remove distutils dependency (pull #917) 3.1.0 ----- * WHL: Wheels contain PROJ 8.0.1 * DEP: Minimum supported Python version 3.7 (issue #790) * REF: Multithread safe CRS, Proj, & Transformer (issue #782) * BUG: Disallow NaN values with AreaOfInterest & BBox (issue #788) * ENH: Pretty format PROJ string support (issue #764) * ENH: Added :meth:`pyproj.transformer.Transformer.to_proj4` (pull #798) * ENH: Added authority, accuracy, and allow_ballpark kwargs to :meth:`pyproj.transformer.Transformer.from_crs` (issue #754) * ENH: Added support for "AUTH:CODE" input to :meth:`pyproj.transformer.Transformer.from_pipeline` (issue #755) * ENH: Added :meth:`pyproj.crs.CRS.to_3d` (pull #808) * ENH: Added :meth:`pyproj.transformer.Transformer.transform_bounds` 
(issue #809) * ENH: Added :attr:`pyproj.crs.CRS.is_compound` (pull #823) * ENH: Added `initial_idx` and `terminal_index` kwargs to :meth:`pyproj.Geod.npts` (pull #841) * ENH: Added :meth:`pyproj.Geod.inv_intermediate` & :meth:`pyproj.Geod.fwd_intermediate` (pull #841) * REF: Skip transformations if `noop` & deprecate `skip_equivalent` (pull #824) 3.0.1 ----- * WHL: Wheels contain PROJ 7.2.1 * Use `proj_context_errno_string` in PROJ 8+ due to deprecation (issue #760) * BUG: Allow transformations with empty arrays (issue #766) * BUG: support numpy objects in CRS.from_cf (issue #773) 3.0.0 ----- * Minimum supported Python version 3.6 (issue #499) * Minimum PROJ version 7.2 (issues #599 & #689) * WHL: Removed datumgrids from wheels because not needed with RFC 4 (pull #628) * WHL: Wheels contain PROJ 7.2 * ENH: Added :ref:`network_api` (#675, #691, #695) * ENH: Added ability to use global context (issue #661) * ENH: Added transformation grid sync API/CLI (issue #572) * ENH: Support objects with '__array__' method (pandas.Series, xarray.DataArray, dask.array.Array) (issue #573) * ENH: Added :func:`pyproj.datadir.get_user_data_dir` (pull #636) * ENH: Added :attr:`pyproj.transformer.Transformer.is_network_enabled` (issue #629) * ENH: Added :meth:`pyproj.transformer.TransformerGroup.download_grids` (pull #643) * ENH: Use 'proj_get_units_from_database' in :func:`pyproj.database.get_units_map` & cleanup :func:`pyproj.database.get_codes` (issue #619) * ENH: Added support for radians for Proj & Transformer.from_pipeline & use less gil (issue #612) * ENH: Datum.from_name default to check all datum types (issue #606) * ENH: Use from_user_input in __eq__ when comparing CRS sub-classes (i.e. PrimeMeridian, Datum, Ellipsoid, etc.) (issue #606) * ENH: Add support for coordinate systems with CRS using CF conventions (issue #536) * ENH: Use `proj_is_equivalent_to_with_ctx` in the place of `proj_is_equivalent_to` internally (issue #666) * BUG: Add support for identifying engineering/parametric/temporal datums (issue #670) * ENH: Add support for temporal CRS CF coordinate system (issue #672) * ENH: Added support for debugging internal PROJ (pull #696) * ENH: Added pathlib support for data directory methods (pull #702) * ENH: Added :func:`pyproj.database.query_crs_info` (pull #703) * ENH: Added :func:`pyproj.database.query_utm_crs_info` (pull #712) * REF: Refactor Proj to inherit from Transformer (issue #624) * REF: Added `pyproj.database`, `pyproj.aoi`, and `pyproj.list` modules (pull #703) * BUG: Fix handling of polygon holes when calculating area in Geod (pull #686) 2.6.1 ~~~~~ * WHL: Wheels contain PROJ version is 7.0.1 * BUG: Allow `*_name` to be added in CRS.to_cf (issue #585) * BUG: Fix building prime meridian in :meth:`pyproj.crs.CRS.from_cf` (pull #588) * BUG: Fix check for numpy bool True kwarg (pull #590) * DOC: Update pyproj.Proj docstrings for clarity (issue #584) * Added `pyproj.__proj_version__` * BUG: Fix :meth:`pyproj.Proj.get_factors` (issue #600) * BUG: fix unequal (!=) with non-CRS type (pull #596) 2.6.0 ~~~~~ * ENH: Added :meth:`pyproj.Proj.get_factors` (issue #503) * ENH: Added type hints (issue #369) * BUG: Don't use CRS classes for defaults in CRS child class init signatures (issue #554) * ENH: Updated :attr:`pyproj.crs.CRS.axis_info` to pull all relevant axis information from CRS (issue #557) * ENH: Added :meth:`pyproj.transformer.Transform.__eq__` (issue #559) * ENH: Added :attr:`pyproj.crs.CRS.utm_zone` (issue #561) * BUG: Modify CRS dict test to accommodate numpy bool types. 
(issue #564) * BUG: Fix pipeline transformations to match cct (issue #565) * BUG: Don't silently ignore kwargs when projparams are specified (Proj & CRS) (issue #565) 2.5.0 ~~~~~ * WHL: Wheels contain PROJ version is 6.3.1 * Remove deprecated PyObject_AsWriteBuffer (issue #495) * ENH: Added :meth:`pyproj.crs.CRS.equals` with `ignore_axis_order` kwarg (issue #493) * ENH: Added :meth:`pyproj.crs.CoordinateSystem.from_json`, :meth:`pyproj.crs.CoordinateSystem.from_json_dict`, and :meth:`pyproj.crs.CoordinateSystem.from_string` (pull #501) * ENH: Added :class:`pyproj.crs.CoordinateSystem` to `pyproj.crs` namespace (pull #501) * ENH: Added :meth:`pyproj.crs.CoordinateSystem.from_user_input`, :meth:`pyproj.crs.CoordinateOperation.from_user_input`, :meth:`pyproj.crs.Datum.from_user_input`, :meth:`pyproj.crs.PrimeMeridian.from_user_input`, :meth:`pyproj.crs.Ellipsoid.from_user_input` (pull #502) * ENH: Added :meth:`pyproj.crs.CoordinateSystem.from_name`, :meth:`pyproj.crs.CoordinateOperation.from_name`, :meth:`pyproj.crs.Datum.from_name`, :meth:`pyproj.crs.PrimeMeridian.from_name`, :meth:`pyproj.crs.Ellipsoid.from_name` (pull #505) * BUG: Fix getting :attr:`pyproj.crs.Ellipsoid.semi_minor_metre` when not computed (issue #457) * ENH: Added support for custom CRS (issue #389) * ENH: Added enumeration for WKT2_2019 (issue #526) * ENH: Update from_cf/to_cf to use WKT instead of PROJ strings for internal management (issue #515) 2.4.2 ~~~~~ * Elevate +init= warning to FutureWarning (pull #486) * Add UserWarning to :meth:`pyproj.crs.CRS.to_proj4` (pull #486) * BUG: Fix for 32-bit i686 platforms (issue #481) * Return 'inf' in Proj instead of 1.e30 (pull #491) 2.4.1 ~~~~~ * WHL: Wheels contain PROJ version is 6.2.1 (issue #456) * WHL: Wheels for Linux x86_64 use manylinux2010 (pyproj4/pyproj-wheels/pull/18) * BUG: Fix setting lat_ts for mercator projection in :meth:`pyproj.crs.CRS.from_cf` and :meth:`pyproj.crs.CRS.to_cf` (issue #461) * BUG: latlon -> longlat in `CRS.from_cf()` for o_proj so behavior consistent in PROJ 6.2.0 and 6.2.1 (pull #472) * ENH: Add repr for `pyproj.crs.CoordinateOperation` and for `pyproj.transformer.TransformerGroup` (pull #464) 2.4.0 ~~~~~ * Minimum PROJ version is 6.2.0 (issue #411) * Removed global pyproj context (issue #418) * Added support for PROJ JSON in `pyproj.crs` objects and `pyproj.Transformer` (pull #432) * Moved doctests code out of `pyproj.__init__` (issue #417) * Added version information to `python -m pyproj` (pull #429) * Added `scope` & `remarks` to `pyproj.crs` objects and `pyproj.Transformer` (issue #441) * Added `operations` to `pyproj.crs.CoordinateOperation` objects and `pyproj.Transformer` (issue #441) * Added :func:`pyproj.get_authorities` and :func:`pyproj.get_codes` (issue #440) * Release gil in core cython/PROJ code (issue #386) * BUG: Added checks for uninitialized `pyproj.crs` objects to prevent core dumping (issue #433) * BUG: Added fix for get_transform_crs when checking type (pull #439) * DOC: Build docs with python3 (pull #428) 2.3.1 ~~~~~ * Added cleanup for internal PROJ errors (issue #413) * Delay checking for pyproj data directory until importing pyproj (issue #415) * Address issue where PROJ core dumps on proj_create with +init= when global context does not have data directory set (issue #415 & issue #368) 2.3.0 ~~~~~ * Minimum supported Python version 3.5 (issue #331) * New `pyproj.geod.Geod` additions: * Added support for calculating geodesic area (:meth:`pyproj.Geod.polygon_area_perimeter`) and added interface to calculate total length 
of a line (:meth:`pyproj.Geod.line_length` & :meth:`pyproj.Geod.line_lengths`) (issue #210). * Added support for calculating geodesic area and line lengths with shapely geometries (:meth:`pyproj.Geod.geometry_area_perimeter` & :meth:`pyproj.Geod.geometry_length`) (pull #366) * New `pyproj.transformer` additions: * Added :class:`pyproj.transformer.TransformerGroup` to make all transformations available (issue #381) * Added option for `area_of_interest` for :meth:`pyproj.transformer.Transformer.from_crs`, :meth:`pyproj.transformer.Transformer.from_proj` and :class:`pyproj.transformer.TransformerGroup` * Added :attr:`pyproj.transformer.Transformer.area_of_use` (issue #385) * Added :attr:`pyproj.crs.CoordinateOperation.area_of_use` (issue #385) * Updated to only have one PJ_CONTEXT per pyproj session (issue #374) * Always return latlon with Proj (issue #356) * Remove aenum dependency (issue #339) * Removed deprecated functions `Proj.proj_version`, `CRS.is_valid`, and `CRS.to_geodetic()` (pull #371) * Search on `sys.prefix` for the PROJ data directory (issue #387) 2.2.2 ~~~~~ * Update wheels to PROJ 6.1.1 * Add deprecation warning when using +init= syntax (pull #358) * Added :meth:`pyproj.crs.is_proj` (pull #359) * Fixed case in :meth:`pyproj.crs.CRS.to_dict` with :meth:`pyproj.crs.CRS.to_proj4` returning None (pull #359) * Keep `no_defs` in input PROJ string as it does not hurt/help anything in current code (pull #359) * Made public properties on C classes readonly (pull #359) * Update data dir exception handling to prevent ignoring errors (pull #361) * :meth:`pyproj.crs.CRS.to_cf` export transverse mercator parameters for UTM zones (pull #362) 2.2.1 ~~~~~ * Added :meth:`pyproj.show_versions` (issue #334) * Added fix for whitepace around '=' in PROJ strings (issue #345) * Update version check in `setup.py` (issue #323) * Add "stable" doc site pointing to latest release (issue #347, pull #348) * Deprecate `Proj.proj_version` (pull #337) * Test fixes (pull #333, pull #335) 2.2.0 ~~~~~ * Minimum PROJ version is now 6.1.0 * `pyproj.crs` updates: * Updated CRS repr (issue #264) * Add Datum, CoordinateSystem, CoordinateOperation classes, (issue #262) * Added :meth:`pyproj.crs.CRS.to_cf` and :meth:`pyproj.crs.CRS.from_cf` for converting to/from Climate and Forecast (CF) 1.8 grid mappings (pull #244) * Added :meth:`pyproj.crs.CRS.to_dict` (issue #226) * Added :meth:`pyproj.crs.CRS.to_authority` (pull #294) * Added :attr:`pyproj.crs.CRS.is_vertical` and :attr:`pyproj.crs.CRS.is_engineering` (issue #316) * Added :attr:`pyproj.crs.CRS.target_crs` (pull #328) * Provide option to "pretty print" WKT in :attr:`pyproj.crs.CRS.to_wkt` (issue #258) * Add support for Bound and Compound CRS for :attr:`pyproj.crs.CRS.is_geographic`, :attr:`pyproj.crs.CRS.is_projected` (issue #274) * Add support for Bound CRS for :attr:`pyproj.crs.CRS.is_geocentric` (issue #374) * Add support for comparison with CRS a non-crs type supported by :meth:`pyproj.crs.CRS.from_user_input` (issue #312) * Added support for ITRF, compound EPSG, and urn projection strings in CRS (pull #289) * Better handle Compound CRS (issue #265) * Disallow creation of non-CRS object (eg pipeline) in CRS class (issue #267) * Added check in :meth:`pyproj.crs.CRS.to_epsg` for when `proj_list` is null (issue #257) * Fix comparing classes of non-instance types (issue #310) * `pyroj.transformer` updates: * Added `always_xy` option to Transformer so the transform method will always accept as input and return as output coordinates using the traditional GIS order, 
that is longitude, latitude for geographic CRS and easting, northing for most projected CRS (issue #225) * Provide `direction` option in :meth:`pyproj.transformer.Transformer.transform` (issue #266) * Add check for valid initialization of Transformer and ensure it is a transformer (issue #321) * Added :meth:`pyproj.transformer.Transformer.to_wkt` as well as attributes related to `PJ_PROJ_INFO` (pull #322) * Undo deprecation of :meth:`pyproj.transformer.Transformer.from_crs` (issue #275) * Fix false positive errors raised in transformer (issue #249) * Fix :class:`pyproj.Proj` initialization from DerivedGeographicCRS (issue #270) * Add interface to get the projection/ellps/prime_meridian/units lists (issue #251) * Docs/Build/Test fixes (pull #278, pull #245, pull #248, pull #247, issue #253, pull #252) 2.1.3 ~~~~~ * Added support for time transformations (issue #208) * Fixed projection equivalence testing for transformations (pull #231). * Switch to pytest for testing (pull #230) * Various testing fixes (pull #223, #222, #221, #220) * Convert PROJ error messages from bytes to strings (pull #219) * Fix data dir path separator to be (;) for windows and (:) for linux (pull #234) 2.1.2 ~~~~~ * Updated to use the CRS definition for Proj instances in transforms (issue #207) * Add option to skip transformation operation if input and output projections are equivalent and always skip if the input and output projections are exact (issue #128) * Update setup.py method for checking PROJ version (pull #211) * Add internal proj error log messages to exceptions (pull #215) 2.1.1 ~~~~~ * Restore behavior of 1.9.6 when illegal projection transformation requested (return ``inf`` instead of raising an exception, issue #202). kwarg ``errcheck`` added to :func:`pyproj.transformer.transform` and :func:`pyproj.transformer.itransform` (default ``False``). When ``errcheck=True`` an exception is raised. 2.1.0 ~~~~~ * Added :class:`pyproj.transformer.Transformer` to make repetitive transformations more efficient (issue #187) * Added fix for using local datumgrids with transform (issue #191) * Added :meth:`pyproj.transformer.Transformer.from_pipeline` to support pipeline transformations. * Added fix for conversion between radians/degrees for transformations (issues #192 & #195) 2.0.2 ~~~~~ * add filter for boolean values in dict2string so "no_rot=True" works (issue #183). * make sure .pxd files included in source tarball. * add radians flag back in for transform/itransform (issue #185). 2.0.1 ~~~~~ * Ensure data path set properly for TransProj (pull request #179, addressed issue #176). 2.0.0 ~~~~~ * Update to PROJ version 6.0.0 & removed support for older PROJ versions. * Added pyproj.CRS class. * Updated pyproj.Proj & pyproj.transform to accept any input from CRS.from_user_input. * Removed internal PROJ source code. * Changed default for preserve_units to be True in pyproj.Proj class initialization. * Modified logic for searching for the PROJ data directory to not conflict with older versions of PROJ. * Added pyproject.toml. 1.9.6 ~~~~~ * fix segfault when inverse projection not defined (issue #43, pull request #44). * supports python 3.7 1.9.5.1 ~~~~~~~ * fix for issue #42 (compilation error with microsoft visual studio). 1.9.5 ~~~~~ * update proj4 source to latest github master (commit 953cc00fd87425395cabe37641cda905c4b587c1). * port of basemap fix for input arrays in fortran order * restore inverse Hammer patch that was lost when proj4 source code was updated.
1.9.4 (git tag v1.9.4rel) ~~~~~~~~~~~~~~~~~~~~~~~~~ * migrate to github from googlecode. * update proj4 source code from svn r2595 (version 4.9.0RC2). * include runtime_library_dirs in setup-proj.py. * added to_latlong method (issue 51). * fix back azimuth when lon1 and lon2 are identical. 1.9.3 (svn revision 327) ~~~~~~~~~~~~~~~~~~~~~~~~ * Geod now uses C code adapted from geographiclib now included in proj4 source, instead of pure python code directly from geographiclib. * make radians=True work with Geod.npts (issue 47). * allow PROJ_DIR env var to control location of proj data (issue 40). 1.9.2 (svn revision 301) ~~~~~~~~~~~~~~~~~~~~~~~~ * updated proj4 src to 4.8.0 - includes two new map projections (natearth and isea). 1.9.1 (svn revision 285) ~~~~~~~~~~~~~~~~~~~~~~~~ * restore compatibility with python 2.4/2.5, which was broken by the addition of the geographiclib geodesic module (issue 36). 1.9.0 (svn revision 282) ~~~~~~~~~~~~~~~~~~~~~~~~ * use pure python geographiclib for geodesic computation codes instead of proj4. * don't use global variable pj_errno for return codes, use pj_ctx_get_errno instead. * use new projCtx structure for thread safety in proj lib. * update C source and data from proj4 svn (r2140). * add pj_list and pj_ellps module level variables (a dict mapping short names to longer descriptions, e.g. pyproj.pj_list['aea'] = 'Albers Equal Area'). 1.8.9 (svn revision 222) ~~~~~~~~~~~~~~~~~~~~~~~~ * Python 3 now supported. * allow 'EPSG' init (as well as 'epsg'). This only worked on case-insensitive filesystems previously. Fixes issue 6. * added inverse to Hammer projection. * updated proj.4/src/pj_mutex.c from proj4 svn to fix a threading issue on windows (issue 25). Windows binary installers updated (version 1.8.8-1), courtesy Christoph Gohlke. * if inputs are NaNs, return huge number (1.e30). 1.8.8 (svn revision 196) ~~~~~~~~~~~~~~~~~~~~~~~~ * add extra datum shift files, added test/test_datum.py (fixes issue 22). datum shifts now work correctly in transform function. 1.8.7 (svn revision 175) ~~~~~~~~~~~~~~~~~~~~~~~~ * reverted pj_init.c to old version (from proj4 4.6.1) because version in 4.7.0 includes caching code that can cause segfaults in pyproj (issue 19). * added 'preserve_units' keyword to Proj.__init__ to suppress conversion to meters. 1.8.6 (svn revision 169) ~~~~~~~~~~~~~~~~~~~~~~~~ * now works with ms vs2008, vs2003 (fixed missing isnan). * updated to proj 4.7.0 (fixes a problem coexisting with pyqt). * allow Geod instance to be initialized using a proj4 string 1.8.5 (svn revision 155) ~~~~~~~~~~~~~~~~~~~~~~~~ * allow Proj instance to be initialized using a proj4 string (instead of just a dict or kwargs). 1.8.4 (svn revision 151) ~~~~~~~~~~~~~~~~~~~~~~~~ * updated proj4 sources to version 4.6.0 1.8.3 (svn revision 146) ~~~~~~~~~~~~~~~~~~~~~~~~ * fixed bug in Geod class that caused erroneous error message "undefined inverse geodesic (may be an antipodal point)". * fix __reduce__ method of Geod class so instances can be pickled. * make sure points outside projection limb are set to 1.e30 on inverse transform (if errcheck=False). * fixed small setup.py bug. * generate C source with Cython 0.9.6.6 (pycompat.h no longer needed). 1.8.2 ~~~~~ * added 'srs' (spatial reference system) instance variable to Proj. * instead of returning HUGE_VAL (usually 'inf') when projection not defined and errcheck=False, return 1.e30. * added Geod class for geodesic (i.e. Great Circle) computations. Includes doctests (which can be run with pyproj.test()). 
* proj.4 source code now included, thus removing proj.4 lib dependency. Version 4.5.0 is included, with a patch to create an API for geodesic computations. * python 2.4 compatibility patch (suggested by Andrew Straw) from M. v. Loewis: http://mail.python.org/pipermail/python-dev/2006-March/062561.html 1.8.1 ~~~~~ * if given tuples, returns tuples (instead of lists). * test for numpy arrays first. * Fixed error in docstring example. * README.html contains html docstrings generated by pydoc. * Renamed pyproj.so to _pyproj.so, created a new python module called pyproj.py. Moved as code as possible from _pyproj.so to pyproj.py. * docstring examples now executed by doctest when 'pyproj.test()' is run. * added test to _pyproj.c which defines Py_ssize_t for python < 2.5. This is necessary when pyrex 0.9.5 is used. 1.8.0 ~~~~~ * Better error handling Proj.__init__. * Added optional keyword 'errcheck' to __call__ method. * If True, an exception is raised if the transformation is invalid. 1.7.3 ~~~~~ * python 2.5 support. pyproj-3.7.1/docs/index.rst000066400000000000000000000015341475425760300156560ustar00rootroot00000000000000pyproj Documentation ==================== Python interface to `PROJ `_ (cartographic projections and coordinate transformations library). GitHub Repository: https://github.com/pyproj4/pyproj .. note:: Minimum supported PROJ version is 9.2 .. note:: Minimum supported Python version is 3.10 .. note:: Linux (manylinux2014) wheels require pip 19.3+ .. note:: pyproj 3 wheels do not include transformation grids. For migration assistance see: :ref:`transformation_grids` .. toctree:: :maxdepth: 1 :caption: Contents: installation examples transformation_grids gotchas api/index cli advanced_examples build_crs build_crs_cf crs_compatibility optimize_transformations history past_versions Indices and tables ================== * :ref:`genindex` * :ref:`modindex` * :ref:`search` pyproj-3.7.1/docs/installation.rst000066400000000000000000000127351475425760300172550ustar00rootroot00000000000000.. highlight:: shell ============ Installation ============ The easiest methods for installing pyproj are: 1. Use pip to install the binary wheels from `PyPI `__: .. code-block:: bash python -m pip install pyproj .. note:: Linux (manylinux2014) wheels require pip 19.3+ .. note:: pyproj 3+ wheels do not include transformation grids. For migration assistance see: :ref:`transformation_grids` - The MacOS and Linux wheels are powered by `cibuildwheel `__ & `multibuild `__ - The Windows wheels versions <= 3.3.x were built by `Christoph Gohlke `__ 2. Use `conda `__ with the `conda-forge `__ channel: .. code-block:: bash conda config --prepend channels conda-forge conda config --set channel_priority strict conda create -n pyproj_env pyproj conda activate pyproj_env .. note:: "... we recommend always installing your packages inside a new environment instead of the base environment from anaconda/miniconda. Using envs make it easier to debug problems with packages and ensure the stability of your root env." -- https://conda-forge.org/docs/user/tipsandtricks.html .. warning:: Avoid using `pip install` with a conda environment. If you encounter a python package that isn't in conda-forge, consider submitting a recipe: https://github.com/conda-forge/staged-recipes/ - `pyproj` is maintained by the `pyproj-feedstock maintainers `__ - `PROJ` is maintained by the `proj.4-feedstock maintainers `__ If these installation methods do not meet your needs, the section below provides further instructions for getting setup. 3. 
Install nightly wheels from anaconda: .. code-block:: python python -m pip install pyproj --pre --extra-index-url https://pypi.anaconda.org/scientific-python-nightly-wheels/simple Transformation Grids ===================== See: :ref:`transformation_grids` Installing from source ====================== Version compatibility matrix: ============ ============ pyproj PROJ ============ ============ <= 1.9.6 <= 5.2 2.0-2.1 6.0-7 2.2-2.3 6.1-7 2.4-2.6 6.2-7 3.0.0 7.2 3.0.1-3.2 7.2-9.1 3.3 8.0-9.1 3.4+ 8.2+ 3.5+ 9+ 3.7+ 9.2+ ============ ============ Setup PROJ ------------ PROJ is required when building from source. :ref:`PROJ Installation Instructions ` You can also download PROJ from: - https://download.osgeo.org/proj - https://github.com/OSGeo/PROJ pyproj Build Environment Variables ----------------------------------- .. envvar:: PROJ_VERSION .. versionadded:: 3.0 This sets the version of PROJ when building pyproj. This enables installing pyproj when the PROJ executables are not present but the header files exist. .. envvar:: PROJ_DIR This is the path to the base directory for PROJ. Examples of how to set the PROJ_DIR environment variable: Windows:: set PROJ_DIR=C:\OSGeo4W\ Linux:: export PROJ_DIR=/usr/local .. envvar:: PROJ_LIBDIR This is the path to the directory containing the PROJ libraries. If not set, it searches the `lib` and `lib64` directories inside the PROJ directory. .. envvar:: PROJ_INCDIR This is the path to the PROJ include directory. If not set, it assumes it is the `includes` directory inside the PROJ directory. .. envvar:: PROJ_WHEEL This is a boolean value used when building a wheel. When true it includes the contents of the `pyproj/proj_dir/proj/share` directory if present. .. envvar:: PYPROJ_FULL_COVERAGE Boolean that sets the compiler directive for cython to include the test coverage. Setup pyproj ------------ In the setup.py, the order for searching for PROJ is: 1. The :envvar:`PROJ_DIR` environment variable 2. The internal PROJ directory (pyproj/proj_dir) 3. The `proj` executable in sys.prefix 4. The `proj` executable on the PATH For best results, set the :envvar:`PROJ_DIR` environment variable to point to location of PROJ installation before running setup.py. If you have a previous version of PROJ installed alongside the current version of PROJ, the best way to avoid conflicts is to: 1. Remove the previous PROJ from `PATH` & unset the `PROJ_DATA`` (PROJ 9.1+) | `PROJ_LIB` (PROJ<9.1) environment variables (temporarily) 2. Install PROJ to the internal PROJ directory (pyproj/proj_dir) 3. Set the environment variable :envvar:`PROJ_DIR` to point to the internal PROJ directory 4. Set the environment variable :envvar:`PROJ_WHEEL` to true 5. Build pyproj Install pyproj ~~~~~~~~~~~~~~ .. note:: `Cython `_ or pip>=10.0.1 is required for the installation. .. note:: You may need to run pip with administrative privileges (e.g. `sudo pip`) or perform a user only installation (e.g. `pip install --user`). From pypi: ^^^^^^^^^^ .. code-block:: bash pip install pyproj --no-binary pyproj From GitHub with `pip`: ^^^^^^^^^^^^^^^^^^^^^^^ .. code-block:: bash pip install git+https://github.com/pyproj4/pyproj.git From cloned GitHub repo for development: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ .. code-block:: bash pip install -e . pyproj-3.7.1/docs/make.bat000066400000000000000000000014001475425760300154120ustar00rootroot00000000000000@ECHO OFF pushd %~dp0 REM Command file for Sphinx documentation if "%SPHINXBUILD%" == "" ( set SPHINXBUILD=python -msphinx ) set SOURCEDIR=. 
set BUILDDIR=_build set SPHINXPROJ=pyproj if "%1" == "" goto help %SPHINXBUILD% >NUL 2>NUL if errorlevel 9009 ( echo. echo.The Sphinx module was not found. Make sure you have Sphinx installed, echo.then set the SPHINXBUILD environment variable to point to the full echo.path of the 'sphinx-build' executable. Alternatively you may add the echo.Sphinx directory to PATH. echo. echo.If you don't have Sphinx installed, grab it from echo.http://sphinx-doc.org/ exit /b 1 ) %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% goto end :help %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% :end popd
pyproj-3.7.1/docs/media/000077500000000000000000000000001475425760300150715ustar00rootroot00000000000000
pyproj-3.7.1/docs/media/icon.png000066400000000000000000000015511475425760300165310ustar00rootroot00000000000000[binary PNG data omitted: Inkscape export, author Sylvain Beorchia, created 2019/10/01, CC0 Public Domain Dedication]
pyproj-3.7.1/docs/media/icon.svg000066400000000000000000000073241475425760300165500ustar00rootroot00000000000000 image/svg+xml 2021/08/04 Brendan Jurd
pyproj-3.7.1/docs/media/logo.png000066400000000000000000000366431475425760300165510ustar00rootroot00000000000000[binary PNG data omitted: Inkscape export, author Sylvain Beorchia, created 2019/10/01, CC0 Public Domain Dedication]
pyproj-3.7.1/docs/media/logo.svg000066400000000000000000000361131475425760300165560ustar00rootroot00000000000000 image/svg+xml 2021/08/04 Brendan Jurd
pyproj-3.7.1/docs/optimize_transformations.rst000066400000000000000000000001651475425760300217170ustar00rootroot00000000000000..
_optimize_transformations: Optimize Transformations ======================== Moved to: :ref:`advanced_examples` pyproj-3.7.1/docs/past_versions.rst000066400000000000000000000015441475425760300174470ustar00rootroot00000000000000Documentation Archive ===================== - `3.7.0 `_ - `3.6.1 `_ - `3.5.0 `_ - `3.4.1 `_ - `3.3.1 `_ - `3.2.1 `_ - `3.1.0 `_ - `3.0.1 `_ - `2.6.1 `_ - `2.5.0 `_ - `2.4.2 `_ - `2.3.1 `_ - `2.2.2 `_ - `2.1.3 `_ - `1.9.6 `_ pyproj-3.7.1/docs/transformation_grids.rst000066400000000000000000000100421475425760300207770ustar00rootroot00000000000000.. _transformation_grids: Transformation Grids ===================== Transformation grids improve accuracy when you are performing datum transformations. More information about the data available is located under the PROJ :ref:`resource files ` documentation. .. note:: `pyproj` API for managing the :ref:`data_directory` and :ref:`network_api`. .. note:: pyproj 3 wheels do not include any transformation grids. Downloading data ---------------- PROJ 7+ ^^^^^^^^ PROJ 7.0 has introduced, per :ref:`PROJ RFC 4: Remote access to grids and GeoTIFF grids `, the capability to work with grid files that are not installed on the local machine where PROJ is executed. Available methods for download include: - `Mirroring the data `__: To download to the PROJ user-writable data directory: .. versionadded:: 7.1.0 .. code-block:: bash export PROJ_DOWNLOAD_DIR=$(python -c "import pyproj; print(pyproj.datadir.get_user_data_dir())") To download to the main PROJ data directory: .. code-block:: bash export PROJ_DOWNLOAD_DIR=$(python -c "import pyproj; print(pyproj.datadir.get_data_dir())") Download the files with either: .. code-block:: bash aws s3 sync s3://cdn.proj.org ${PROJ_DOWNLOAD_DIR} or: .. code-block:: bash wget --mirror https://cdn.proj.org/ -P ${PROJ_DOWNLOAD_DIR} - The :ref:`projsync ` command line program. - `pyproj sync `__ command line program (pyproj 3+; useful if you use pyproj wheels). - Enabling :ref:`PROJ network ` capabilities. See also :ref:`network_api`. - Download stable from https://download.osgeo.org/proj or latest from https://github.com/OSGeo/PROJ-data - Use `conda `__ with the `conda-forge `__ channel: .. code-block:: bash conda install -c conda-forge proj-data PROJ <= 6 ^^^^^^^^^^ Available methods for download include: - Download stable from https://download.osgeo.org/proj or latest from https://github.com/OSGeo/proj-datumgrid - Use `conda `__ with the `conda-forge `__ channel: .. code-block:: bash conda install -c conda-forge proj-datumgrid-europe proj-datumgrid-north-america proj-datumgrid-oceania proj-datumgrid-world What grids to download? ----------------------- - Only using the :obj:`pyproj.crs.CRS` or :obj:`pyproj.Geod` classes? Then no grids are needed. - Have a machine that can hold and extra 500 MB - 1 GB of data? Then downloading all grids shouldn't be an issue. - Have a machine with limited space, a great network connection, and PROJ 7+? Look into `PROJ network `__ capabilities. See also :ref:`network_api`. - Have a machine with limited space and want to pre-download files? You can enable enable :ref:`debugging-internal-proj` with pyproj 3+ and perform a transformation. The logs will show the grids PROJ searches for. Additionally, the :class:`pyproj.transformer.TransformerGroup` can assist finding the grids you need to download. .. warning:: There are cases where the URL to download the grid is missing. .. 
code-block:: python >>> from pyproj.transformer import TransformerGroup >>> tg = trans_group = TransformerGroup(4326, 2964) UserWarning: Best transformation is not available due to missing Grid(short_name=us_noaa_alaska.tif, full_name=, package_name=, url=https://cdn.proj.org/us_noaa_alaska.tif, direct_download=True, open_license=True, available=False) >>> tg - transformers: 8 - unavailable_operations: 2 >>> tg.transformers[0].description 'Inverse of NAD27 to WGS 84 (7) + Alaska Albers' >>> tg.unavailable_operations[0].name 'Inverse of NAD27 to WGS 84 (85) + Alaska Albers' >>> tg.unavailable_operations[0].grids[0].url 'https://cdn.proj.org/us_noaa_alaska.tif' >>> tg.download_grids(verbose=True) # pyproj 3+ Downloading: https://cdn.proj.org/us_noaa_alaska.tif Downloading: https://cdn.proj.org/ca_nrc_ntv2_0.tif
pyproj-3.7.1/flake8/000077500000000000000000000000001475425760300142345ustar00rootroot00000000000000
pyproj-3.7.1/flake8/cython.cfg000066400000000000000000000003441475425760300162220ustar00rootroot00000000000000[flake8] filename = *.pyx,*.pxd max-line-length=88 select=E302,E203,E111,E114,E221,E128,E231,E126,E265,E305,E301,E127,E261,E271,E129,W291,E222,E241,E123,F403,C400,C401,C402,C403,C404,C405,C406,C407,C408,C409,C410,C411,E501,E303
pyproj-3.7.1/pyproj/000077500000000000000000000000001475425760300144055ustar00rootroot00000000000000
pyproj-3.7.1/pyproj/__init__.py000066400000000000000000000060541475425760300165230ustar00rootroot00000000000000""" Python interface to PROJ (https://proj.org), cartographic projections and coordinate transformations library. Download: http://python.org/pypi/pyproj Requirements: Python 3.10+. Contact: Jeffrey Whitaker Copyright (c) 2006-2018, Jeffrey Whitaker. Copyright (c) 2019-2024, Open source contributors. Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
""" import warnings import pyproj.network from pyproj._context import ( # noqa: F401 pylint: disable=unused-import set_use_global_context, ) from pyproj._show_versions import ( # noqa: F401 pylint: disable=unused-import show_versions, ) from pyproj._version import ( # noqa: F401 pylint: disable=unused-import PROJ_COMPILED_VERSION, PROJ_COMPILED_VERSION_STR, PROJ_VERSION, PROJ_VERSION_STR, ) from pyproj.crs import CRS # noqa: F401 pylint: disable=unused-import from pyproj.database import ( # noqa: F401 pylint: disable=unused-import get_authorities, get_codes, get_units_map, ) from pyproj.exceptions import ( # noqa: F401 pylint: disable=unused-import DataDirError, ProjError, ) from pyproj.geod import ( # noqa: F401 pylint: disable=unused-import Geod, geodesic_version_str, pj_ellps, ) from pyproj.list import ( # noqa: F401 pylint: disable=unused-import get_ellps_map, get_prime_meridians_map, get_proj_operations_map, ) from pyproj.proj import Proj, pj_list # noqa: F401 pylint: disable=unused-import from pyproj.transformer import ( # noqa: F401 pylint: disable=unused-import Transformer, itransform, transform, ) __version__ = "3.7.1" __all__ = [ "Proj", "Geod", "CRS", "Transformer", "transform", "itransform", "pj_ellps", "pj_list", "get_ellps_map", "get_prime_meridians_map", "get_proj_operations_map", "get_units_map", "show_versions", ] __proj_version__ = PROJ_VERSION_STR proj_version_str = PROJ_VERSION_STR # pylint: disable=invalid-name __proj_compiled_version__ = PROJ_COMPILED_VERSION_STR try: pyproj.network.set_ca_bundle_path() except DataDirError as err: warnings.warn(str(err)) pyproj-3.7.1/pyproj/__main__.py000066400000000000000000000143171475425760300165050ustar00rootroot00000000000000""" This is the main entry point for pyproj CLI e.g. python -m pyproj """ import argparse import os from pyproj import __proj_version__, __version__, _show_versions from pyproj.aoi import BBox from pyproj.datadir import get_data_dir, get_user_data_dir from pyproj.sync import ( _download_resource_file, get_proj_endpoint, get_transform_grid_list, ) parser = argparse.ArgumentParser( description=f"pyproj version: {__version__} [PROJ version: {__proj_version__}]" ) parser.add_argument( "-v", "--verbose", help="Show verbose debugging version information.", action="store_true", ) subparsers = parser.add_subparsers(title="commands") sync_parser = subparsers.add_parser( name="sync", description="Tool for synchronizing PROJ datum and transformation support data.", ) sync_parser.add_argument( "--bbox", help=( "Specify an area of interest to restrict the resources to download. " "The area of interest is specified as a " "bounding box with geographic coordinates, expressed in degrees in an " "unspecified geographic CRS. " "`west_long` and `east_long` should be in the [-180,180] range, and " "`south_lat` and `north_lat` in the [-90,90]. `west_long` is generally " "lower than `east_long`, except in the case where the area of interest " "crosses the antimeridian." ), ) sync_parser.add_argument( "--spatial-test", help=( "Specify how the extent of the resource files " "are compared to the area of use specified explicitly with `--bbox`. " "By default, any resource files whose extent intersects the value specified " "by `--bbox` will be selected. If using the ``contains`` strategy, " "only resource files whose extent is contained in the value specified by " "`--bbox` will be selected." 
), choices=["intersects", "contains"], default="intersects", ) sync_parser.add_argument( "--source-id", help=( "Restrict resource files to be downloaded to those whose source_id property " "contains the ID value. Default is all possible values." ), ) sync_parser.add_argument( "--area-of-use", help=( "Restrict resource files to be downloaded to those whose area_of_use property " "contains the AREA_OF_USE value. Default is all possible values." ), ) sync_parser.add_argument( "--file", help=( "Restrict resource files to be downloaded to those whose name property " " (file name) contains the FILE value. Default is all possible values." ), ) sync_parser.add_argument( "--exclude-world-coverage", help="Exclude files which have world coverage.", action="store_true", ) sync_parser.add_argument( "--include-already-downloaded", help="Include grids that are already downloaded.", action="store_true", ) sync_parser.add_argument( "--list-files", help="List the files without downloading.", action="store_true" ) sync_parser.add_argument( "--all", help="Download all missing transform grids.", action="store_true" ) sync_parser.add_argument( "--system-directory", help=( "If enabled, it will sync grids to the main PROJ data directory " "instead of the user writable directory." ), action="store_true", ) sync_parser.add_argument( "--target-directory", help="The directory to sync grids to instead of the user writable directory.", ) sync_parser.add_argument( "-v", "--verbose", help="Print download information.", action="store_true" ) def _parse_sync_command(args): """ Handle sync command arguments """ if not any( ( args.bbox, args.list_files, args.all, args.source_id, args.area_of_use, args.file, ) ): sync_parser.print_help() return if args.all and any( ( args.bbox, args.list_files, args.source_id, args.area_of_use, args.file, ) ): raise RuntimeError( "Cannot use '--all' with '--list-files', '--source-id'," "'--area-of-use', '--bbox', or '--file'." ) bbox = None if args.bbox is not None: west, south, east, north = args.bbox.split(",") bbox = BBox( west=float(west), south=float(south), east=float(east), north=float(north), ) if args.target_directory and args.system_directory: raise RuntimeError("Cannot set both --target-directory and --system-directory.") target_directory = args.target_directory if args.system_directory: target_directory = get_data_dir().split(os.path.sep)[0] elif not target_directory: target_directory = get_user_data_dir(True) grids = get_transform_grid_list( source_id=args.source_id, area_of_use=args.area_of_use, filename=args.file, bbox=bbox, spatial_test=args.spatial_test, include_world_coverage=not args.exclude_world_coverage, include_already_downloaded=args.include_already_downloaded, target_directory=target_directory, ) if args.list_files: print("filename | source_id | area_of_use") print("----------------------------------") else: endpoint = get_proj_endpoint() for grid in grids: if args.list_files: print( grid["properties"]["name"], grid["properties"]["source_id"], grid["properties"].get("area_of_use"), sep=" | ", ) else: filename = grid["properties"]["name"] _download_resource_file( file_url=f"{endpoint}/{filename}", short_name=filename, directory=target_directory, verbose=args.verbose, sha256=grid["properties"]["sha256sum"], ) def main(): """ Main entrypoint into the command line interface. 
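Typical invocations are ``python -m pyproj -v`` to print verbose version information and ``python -m pyproj sync --list-files`` to list the transformation grids available for download.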
""" args = parser.parse_args() if hasattr(args, "bbox"): _parse_sync_command(args) elif args.verbose: _show_versions.show_versions() else: parser.print_help() if __name__ == "__main__": main() pyproj-3.7.1/pyproj/_compat.pxd000066400000000000000000000004331475425760300165440ustar00rootroot00000000000000cdef str cstrdecode(const char *instring) cpdef bytes cstrencode(str pystr) IF CTE_PYTHON_IMPLEMENTATION == "CPython": from cpython cimport array cdef array.array empty_array(int npts) ELSE: # https://github.com/pyproj4/pyproj/issues/854 cdef empty_array(int npts) pyproj-3.7.1/pyproj/_compat.pyi000066400000000000000000000000511475425760300165460ustar00rootroot00000000000000def cstrencode(pystr: str) -> bytes: ... pyproj-3.7.1/pyproj/_compat.pyx000066400000000000000000000013271475425760300165740ustar00rootroot00000000000000import array cpdef bytes cstrencode(str pystr): """ Encode a string into bytes. """ try: return pystr.encode("utf-8") except UnicodeDecodeError: return pystr.decode("utf-8").encode("utf-8") cdef str cstrdecode(const char *instring): if instring != NULL: return instring return None IF CTE_PYTHON_IMPLEMENTATION == "CPython": from cpython cimport array cdef array.array _ARRAY_TEMPLATE = array.array("d", []) cdef array.array empty_array(int npts): return array.clone(_ARRAY_TEMPLATE, npts, zero=False) ELSE: # https://github.com/pyproj4/pyproj/issues/854 cdef empty_array(int npts): return array.array("d", [float("NaN")] * npts) pyproj-3.7.1/pyproj/_context.pxd000066400000000000000000000002121475425760300167400ustar00rootroot00000000000000include "proj.pxi" cpdef str _get_proj_error() cpdef void _clear_proj_error() noexcept cdef PJ_CONTEXT* pyproj_context_create() except * pyproj-3.7.1/pyproj/_context.pyi000066400000000000000000000005361475425760300167570ustar00rootroot00000000000000def get_user_data_dir(create: bool = False) -> str: ... def _set_context_data_dir() -> None: ... def _set_context_ca_bundle_path(ca_bundle_path: str) -> None: ... def _set_context_network_enabled() -> None: ... def set_use_global_context(active: bool | None = None) -> None: ... def _clear_proj_error() -> None: ... def _get_proj_error() -> str: ... pyproj-3.7.1/pyproj/_context.pyx000066400000000000000000000200371475425760300167740ustar00rootroot00000000000000import logging import os import threading import warnings from cpython.pythread cimport PyThread_tss_create, PyThread_tss_get, PyThread_tss_set from libc.stdlib cimport free, malloc from pyproj._compat cimport cstrencode from pyproj.utils import strtobool # for logging the internal PROJ messages # https://docs.python.org/3/howto/logging.html#configuring-logging-for-a-library _LOGGER = logging.getLogger("pyproj") _LOGGER.addHandler(logging.NullHandler()) # static user data directory to prevent core dumping # see: https://github.com/pyproj4/pyproj/issues/678 cdef const char* _USER_DATA_DIR = proj_context_get_user_writable_directory(NULL, False) # Store the message from any internal PROJ errors cdef str _INTERNAL_PROJ_ERROR = None # global variables cdef bint _NETWORK_ENABLED = strtobool(os.environ.get("PROJ_NETWORK", "OFF")) cdef char* _CA_BUNDLE_PATH = "" # The key to get the context in each thread cdef Py_tss_t CONTEXT_THREAD_KEY def set_use_global_context(active=None): """ .. deprecated:: 3.7.0 No longer necessary as there is only one context per thread now. .. versionadded:: 3.0.0 Activates the usage of the global context. Using this option can enhance the performance of initializing objects in single-threaded applications. .. 
warning:: The global context is not thread safe. .. warning:: The global context maintains a connection to the database through the duration of each python session and is closed once the program terminates. .. note:: To modify network settings see: :ref:`network`. Parameters ---------- active: bool, optional If True, it activates the use of the global context. If False, the use of the global context is deactivated. If None, it uses the environment variable PYPROJ_GLOBAL_CONTEXT and defaults to False if it is not found. """ if active is None: active = strtobool(os.environ.get("PYPROJ_GLOBAL_CONTEXT", "OFF")) if active: warnings.warn( ( "PYPROJ_GLOBAL_CONTEXT is no longer necessary in pyproj 3.7+ " "and does not do anything." ), FutureWarning, stacklevel=2, ) def get_user_data_dir(create=False): """ .. versionadded:: 3.0.0 Get the PROJ user writable directory for datumgrid files. See: :c:func:`proj_context_get_user_writable_directory` This is where grids will be downloaded when :ref:`PROJ network ` capabilities are enabled. It is also the default download location for the :ref:`projsync` command line program. Parameters ---------- create: bool, default=False If True, it will create the directory if it does not already exist. Returns ------- str: The user writable data directory. """ return proj_context_get_user_writable_directory( pyproj_context_create(), bool(create) ) cpdef str _get_proj_error(): """ Get the internal PROJ error message. Returns None if no error was set. """ return _INTERNAL_PROJ_ERROR cpdef void _clear_proj_error() noexcept: """ Clear the internal PROJ error message. """ global _INTERNAL_PROJ_ERROR _INTERNAL_PROJ_ERROR = None cdef void pyproj_log_function(void *user_data, int level, const char *error_msg) noexcept nogil: """ Log function for catching PROJ errors. """ # from pyproj perspective, everything from PROJ is for debugging. # The verbosity should be managed via the # PROJ_DEBUG environment variable. 
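# PJ_LOG_ERROR messages are additionally cached in _INTERNAL_PROJ_ERROR so that
# the most recent PROJ error message can later be retrieved via _get_proj_error().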
if level == PJ_LOG_ERROR: with gil: global _INTERNAL_PROJ_ERROR _INTERNAL_PROJ_ERROR = error_msg _LOGGER.debug(f"PROJ_ERROR: {_INTERNAL_PROJ_ERROR}") elif level == PJ_LOG_DEBUG: with gil: _LOGGER.debug(f"PROJ_DEBUG: {error_msg}") elif level == PJ_LOG_TRACE: with gil: _LOGGER.debug(f"PROJ_TRACE: {error_msg}") cdef void set_context_data_dir(PJ_CONTEXT* context) except *: """ Setup the data directory for the context for pyproj """ from pyproj.datadir import get_data_dir data_dir_list = get_data_dir().split(os.pathsep) # the first path will always have the database cdef bytes b_database_path = cstrencode(os.path.join(data_dir_list[0], "proj.db")) cdef const char* c_database_path = b_database_path if not proj_context_set_database_path(context, c_database_path, NULL, NULL): warnings.warn("pyproj unable to set PROJ database path.") cdef int dir_list_len = len(data_dir_list) cdef const char **c_data_dir = malloc( (dir_list_len + 1) * sizeof(const char*) ) cdef bytes b_data_dir try: for iii in range(dir_list_len): b_data_dir = cstrencode(data_dir_list[iii]) c_data_dir[iii] = b_data_dir c_data_dir[dir_list_len] = _USER_DATA_DIR proj_context_set_search_paths(context, dir_list_len + 1, c_data_dir) finally: free(c_data_dir) cdef void pyproj_context_initialize(PJ_CONTEXT* context) except *: """ Setup the context for pyproj """ proj_log_func(context, NULL, pyproj_log_function) proj_context_use_proj4_init_rules(context, 1) set_context_data_dir(context) proj_context_set_ca_bundle_path(context, _CA_BUNDLE_PATH) proj_context_set_enable_network(context, _NETWORK_ENABLED) cdef class ContextManager: """ The only purpose of this class is to ensure the context is cleaned up properly. """ cdef PJ_CONTEXT* context def __cinit__(self): self.context = NULL def __dealloc__(self): if self.context != NULL: proj_context_destroy(self.context) @staticmethod cdef create(PJ_CONTEXT* context): cdef ContextManager context_manager = ContextManager() context_manager.context = context return context_manager class ContextManagerLocal(threading.local): """ Threading local instance for cython ContextManager class. """ def __init__(self): self.context_manager = None # Initialises in each thread super().__init__() _CONTEXT_MANAGER_LOCAL = ContextManagerLocal() cdef PJ_CONTEXT* pyproj_context_create() except *: """ Create and initialize the context(s) for pyproj. This also manages whether the global context is used. 
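A context is created at most once per thread and cached via CPython thread-specific storage (PyThread_tss_*), so subsequent calls from the same thread return the already-initialized context.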
""" global _CONTEXT_MANAGER_LOCAL if PyThread_tss_create(&CONTEXT_THREAD_KEY) != 0: raise MemoryError("Unable to create key for PROJ context in thread.") cdef const void *thread_pyproj_context = PyThread_tss_get(&CONTEXT_THREAD_KEY) cdef PJ_CONTEXT* pyproj_context = NULL if thread_pyproj_context == NULL: pyproj_context = proj_context_create() pyproj_context_initialize(pyproj_context) PyThread_tss_set(&CONTEXT_THREAD_KEY, pyproj_context) _CONTEXT_MANAGER_LOCAL.context_manager = ContextManager.create(pyproj_context) else: pyproj_context = thread_pyproj_context return pyproj_context def get_context_manager(): """ This returns the manager for the context responsible for cleanup """ return _CONTEXT_MANAGER_LOCAL.context_manager cpdef _set_context_data_dir(): """ Python compatible function to set the data directory on the current context """ set_context_data_dir(pyproj_context_create()) cpdef _set_context_ca_bundle_path(str ca_bundle_path): """ Python compatible function to set the CA Bundle path on the current context and cache for future generated contexts """ global _CA_BUNDLE_PATH b_ca_bundle_path = cstrencode(ca_bundle_path) _CA_BUNDLE_PATH = b_ca_bundle_path proj_context_set_ca_bundle_path(pyproj_context_create(), _CA_BUNDLE_PATH) cpdef _set_context_network_enabled(bint enabled): """ Python compatible function to set the network enables on the current context and cache for future generated contexts """ global _NETWORK_ENABLED _NETWORK_ENABLED = enabled proj_context_set_enable_network(pyproj_context_create(), _NETWORK_ENABLED) pyproj-3.7.1/pyproj/_crs.pxd000066400000000000000000000101761475425760300160550ustar00rootroot00000000000000include "proj.pxi" from pyproj.enums import WktVersion from cpython cimport bool cdef extern from "proj_experimental.h": PJ *proj_crs_promote_to_3D(PJ_CONTEXT *ctx, const char* crs_3D_name, const PJ* crs_2D) PJ *proj_crs_demote_to_2D(PJ_CONTEXT *ctx, const char *crs_2D_name, const PJ *crs_3D) cdef tuple _get_concatenated_operations(PJ_CONTEXT*context, PJ*concatenated_operation) cdef _to_proj4( PJ_CONTEXT* context, PJ* projobj, object version, bint pretty, ) cdef _to_wkt( PJ_CONTEXT* context, PJ* projobj, object version, bint pretty, bool output_axis_rule=*, ) cdef class Axis: cdef readonly str name cdef readonly str abbrev cdef readonly str direction cdef readonly double unit_conversion_factor cdef readonly str unit_name cdef readonly str unit_auth_code cdef readonly str unit_code @staticmethod cdef Axis create(PJ_CONTEXT* context, PJ* projobj, int index) cdef create_area_of_use(PJ_CONTEXT* context, PJ* projobj) cdef class Base: cdef PJ *projobj cdef PJ_CONTEXT* context cdef readonly object _context_manager cdef readonly str name cdef readonly str _remarks cdef readonly str _scope cdef _set_base_info(self) cdef class _CRSParts(Base): pass cdef class Ellipsoid(_CRSParts): cdef readonly double semi_major_metre cdef readonly double semi_minor_metre cdef readonly bint is_semi_minor_computed cdef readonly double inverse_flattening @staticmethod cdef Ellipsoid create(PJ_CONTEXT* context, PJ* ellipsoid_pj) cdef class PrimeMeridian(_CRSParts): cdef readonly double longitude cdef readonly double unit_conversion_factor cdef readonly str unit_name @staticmethod cdef PrimeMeridian create(PJ_CONTEXT* context, PJ* prime_meridian_pj) cdef class Datum(_CRSParts): cdef readonly str type_name cdef readonly object _ellipsoid cdef readonly object _prime_meridian @staticmethod cdef Datum create(PJ_CONTEXT* context, PJ* datum_pj) cdef class CoordinateSystem(_CRSParts): cdef readonly 
list _axis_list @staticmethod cdef CoordinateSystem create(PJ_CONTEXT* context, PJ* coordinate_system_pj) cdef class Param: cdef readonly str name cdef readonly str auth_name cdef readonly str code cdef readonly object value cdef readonly double unit_conversion_factor cdef readonly str unit_name cdef readonly str unit_auth_name cdef readonly str unit_code cdef readonly str unit_category @staticmethod cdef Param create(PJ_CONTEXT* context, PJ* projobj, int param_idx) cdef class Grid: cdef readonly str short_name cdef readonly str full_name cdef readonly str package_name cdef readonly str url cdef readonly bint direct_download cdef readonly bint open_license cdef readonly bint available @staticmethod cdef Grid create(PJ_CONTEXT* context, PJ* projobj, int grid_idx) cdef class CoordinateOperation(_CRSParts): cdef readonly list _params cdef readonly list _grids cdef readonly object _area_of_use cdef readonly str method_name cdef readonly str method_auth_name cdef readonly str method_code cdef readonly double accuracy cdef readonly bint is_instantiable cdef readonly bint has_ballpark_transformation cdef readonly list _towgs84 cdef readonly tuple _operations cdef readonly str type_name @staticmethod cdef CoordinateOperation create(PJ_CONTEXT* context, PJ* coordinate_operation_pj) cdef class _CRS(Base): cdef PJ_TYPE _type cdef PJ_PROJ_INFO projpj_info cdef readonly str srs cdef readonly str _type_name cdef readonly Ellipsoid _ellipsoid cdef readonly object _area_of_use cdef readonly PrimeMeridian _prime_meridian cdef readonly Datum _datum cdef readonly list _sub_crs_list cdef readonly _CRS _source_crs cdef readonly _CRS _target_crs cdef readonly _CRS _geodetic_crs cdef readonly CoordinateSystem _coordinate_system cdef readonly CoordinateOperation _coordinate_operation pyproj-3.7.1/pyproj/_crs.pyi000066400000000000000000000164011475425760300160600ustar00rootroot00000000000000from collections.abc import Iterable from typing import Any, NamedTuple, Optional from pyproj.crs.enums import CoordinateOperationType from pyproj.enums import ProjVersion, WktVersion class Axis: name: str abbrev: str direction: str unit_conversion_factor: float unit_name: str unit_auth_code: str unit_code: str class AreaOfUse: west: float south: float east: float north: float name: str @property def bounds(self) -> tuple[float, float, float, float]: ... class Base: name: str @property def remarks(self) -> str: ... @property def scope(self) -> str: ... def to_wkt( self, version: WktVersion | str = WktVersion.WKT2_2019, pretty: bool = False, output_axis_rule: bool | None = None, ) -> str: ... def to_json(self, pretty: bool = False, indentation: int = 2) -> str: ... def to_json_dict(self) -> dict: ... def __eq__(self, other: object) -> bool: ... def is_exact_same(self, other: Any) -> bool: ... class _CRSParts(Base): @classmethod def from_user_input(cls, user_input: Any) -> _CRSParts: ... class Ellipsoid(_CRSParts): semi_major_metre: float semi_minor_metre: float is_semi_minor_computed: float inverse_flattening: float @staticmethod def from_authority(auth_name: str, code: int | str) -> Ellipsoid: ... @staticmethod def from_epsg(code: int | str) -> Ellipsoid: ... @staticmethod def from_string(ellipsoid_string: str) -> Ellipsoid: ... @staticmethod def from_json_dict(ellipsoid_dict: dict) -> Ellipsoid: ... @staticmethod def from_json(ellipsoid_json_str: str) -> Ellipsoid: ... @staticmethod def from_name(ellipsoid_name: str, auth_name: str | None = None) -> Ellipsoid: ... 
class PrimeMeridian(_CRSParts): longitude: float unit_conversion_factor: str unit_name: str @staticmethod def from_authority(auth_name: str, code: int | str) -> PrimeMeridian: ... @staticmethod def from_epsg(code: int | str) -> PrimeMeridian: ... @staticmethod def from_string(prime_meridian_string: str) -> PrimeMeridian: ... @staticmethod def from_json_dict(prime_meridian_dict: dict) -> PrimeMeridian: ... @staticmethod def from_json(prime_meridian_json_str: str) -> PrimeMeridian: ... @staticmethod def from_name( prime_meridian_name: str, auth_name: str | None = None ) -> PrimeMeridian: ... class Datum(_CRSParts): type_name: str @property def ellipsoid(self) -> Ellipsoid | None: ... @property def prime_meridian(self) -> PrimeMeridian | None: ... @staticmethod def from_authority(auth_name: str, code: int | str) -> Datum: ... @staticmethod def from_epsg(code: int | str) -> Datum: ... @staticmethod def from_string(datum_string: str) -> Datum: ... @staticmethod def from_json_dict(datum_dict: dict) -> Datum: ... @staticmethod def from_json(datum_json_str: str) -> Datum: ... @staticmethod def from_name(datum_name: str, auth_name: str | None = None) -> Datum: ... class CoordinateSystem(_CRSParts): def __init__(self) -> None: ... @property def axis_list(self) -> Iterable[Axis]: ... @staticmethod def from_string(coordinate_system_string: str) -> CoordinateSystem: ... @staticmethod def from_json_dict(coordinate_system_dict: dict) -> CoordinateSystem: ... @staticmethod def from_json(coordinate_system_json_str: str) -> CoordinateSystem: ... def to_cf(self, rotated_pole: bool = False) -> list[dict]: ... class Param: name: str auth_name: str code: str value: str unit_conversion_factor: float unit_name: str unit_auth_name: str unit_code: str unit_category: str class Grid: short_name: str full_name: str package_name: str url: str direct_download: str open_license: str available: str class CoordinateOperation(_CRSParts): method_name: str method_auth_name: str method_code: str accuracy: float is_instantiable: bool has_ballpark_transformation: bool type_name: str @property def params(self) -> Iterable[Param]: ... @property def grids(self) -> Iterable[Grid]: ... @property def area_of_use(self) -> AreaOfUse | None: ... @property def towgs84(self) -> Iterable[float]: ... @property def operations(self) -> tuple[CoordinateOperation]: ... def __init__(self) -> None: ... @staticmethod def from_authority(auth_name: str, code: int | str) -> CoordinateOperation: ... @staticmethod def from_epsg(code: int | str) -> CoordinateOperation: ... @staticmethod def from_string(ellipsoid_string: str) -> CoordinateOperation: ... @staticmethod def from_json_dict(ellipsoid_dict: dict) -> CoordinateOperation: ... @staticmethod def from_json(ellipsoid_json_str: str) -> CoordinateOperation: ... def to_proj4(self, version: ProjVersion | int = ProjVersion.PROJ_5) -> str: ... @staticmethod def from_name( coordinate_operation_name: str, auth_name: str | None = None, coordinate_operation_type: ( CoordinateOperationType | str ) = CoordinateOperationType.CONVERSION, ) -> CoordinateOperation: ... class AuthorityMatchInfo(NamedTuple): auth_name: str code: str confidence: int class _CRS(Base): srs: str type_name: str def __init__(self, proj_string: str) -> None: ... @property def ellipsoid(self) -> Ellipsoid | None: ... @property def area_of_use(self) -> AreaOfUse | None: ... @property def axis_info(self) -> list[Axis]: ... @property def prime_meridian(self) -> PrimeMeridian | None: ... @property def datum(self) -> Datum | None: ... 
@property def sub_crs_list(self) -> Iterable[_CRS]: ... @property def source_crs(self) -> Optional[_CRS]: ... @property def target_crs(self) -> Optional[_CRS]: ... @property def geodetic_crs(self) -> Optional[_CRS]: ... @property def coordinate_system(self) -> CoordinateSystem | None: ... @property def coordinate_operation(self) -> CoordinateOperation | None: ... def to_proj4(self, version: ProjVersion | int = ProjVersion.PROJ_5) -> str: ... def to_epsg(self, min_confidence: int = 70) -> int | None: ... def to_authority(self, auth_name: str | None = None, min_confidence: int = 70): ... def list_authority( self, auth_name: str | None = None, min_confidence: int = 70 ) -> list[AuthorityMatchInfo]: ... def to_3d(self, name: str | None = None) -> _CRS: ... def to_2d(self, name: str | None = None) -> _CRS: ... @property def is_geographic(self) -> bool: ... @property def is_projected(self) -> bool: ... @property def is_vertical(self) -> bool: ... @property def is_bound(self) -> bool: ... @property def is_compound(self) -> bool: ... @property def is_engineering(self) -> bool: ... @property def is_geocentric(self) -> bool: ... def equals(self, other: Any, ignore_axis_order: bool) -> bool: ... @property def is_deprecated(self) -> bool: ... def get_non_deprecated(self) -> list[_CRS]: ... def is_proj(proj_string: str) -> bool: ... def is_wkt(proj_string: str) -> bool: ... def _load_proj_json(in_proj_json: str) -> dict: ... pyproj-3.7.1/pyproj/_crs.pyx000066400000000000000000002660701475425760300161100ustar00rootroot00000000000000import json import re import warnings from collections import OrderedDict, namedtuple from pyproj._compat cimport cstrdecode, cstrencode from pyproj._context cimport _clear_proj_error, pyproj_context_create from pyproj._context import get_context_manager from pyproj.aoi import AreaOfUse from pyproj.crs.datum import CustomEllipsoid from pyproj.crs.enums import CoordinateOperationType, DatumType from pyproj.enums import ProjVersion, WktVersion from pyproj.exceptions import CRSError from pyproj.geod import pj_ellps from pyproj.utils import NumpyEncoder # This is for looking up the ellipsoid parameters # based on the long name cdef dict _PJ_ELLPS_NAME_MAP = { ellps["description"]: ellps_id for ellps_id, ellps in pj_ellps.items() } cdef str decode_or_undefined(const char* instring): pystr = cstrdecode(instring) if pystr is None: return "undefined" return pystr def is_wkt(str proj_string not None): """ .. versionadded:: 2.0.0 Check if the input projection string is in the Well-Known Text format. Parameters ---------- proj_string: str The projection string. Returns ------- bool: True if the string is in the Well-Known Text format """ cdef bytes b_proj_string = cstrencode(proj_string) return proj_context_guess_wkt_dialect(NULL, b_proj_string) != PJ_GUESSED_NOT_WKT def is_proj(str proj_string not None): """ .. versionadded:: 2.2.2 Check if the input projection string is in the PROJ format. Parameters ---------- proj_string: str The projection string. Returns ------- bool: True if the string is in the PROJ format """ return not is_wkt(proj_string) and "=" in proj_string cdef _to_wkt( PJ_CONTEXT* context, PJ* projobj, object version, bint pretty, bool output_axis_rule=None, ): """ Convert a PJ object to a wkt string. 
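The ``version`` value is normalized with ``WktVersion.create()`` and mapped to the matching ``PJ_WKT_TYPE`` before ``proj_as_wkt`` is called.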
Parameters ---------- context: PJ_CONTEXT* projobj: PJ* wkt_out_type: PJ_WKT_TYPE pretty: bool output_axis_rule: bool or None Return ------ str or None """ # get the output WKT format supported_wkt_types = { WktVersion.WKT2_2015: PJ_WKT2_2015, WktVersion.WKT2_2015_SIMPLIFIED: PJ_WKT2_2015_SIMPLIFIED, WktVersion.WKT2_2018: PJ_WKT2_2019, WktVersion.WKT2_2018_SIMPLIFIED: PJ_WKT2_2019_SIMPLIFIED, WktVersion.WKT2_2019: PJ_WKT2_2019, WktVersion.WKT2_2019_SIMPLIFIED: PJ_WKT2_2019_SIMPLIFIED, WktVersion.WKT1_GDAL: PJ_WKT1_GDAL, WktVersion.WKT1_ESRI: PJ_WKT1_ESRI } cdef PJ_WKT_TYPE wkt_out_type wkt_out_type = supported_wkt_types[WktVersion.create(version)] cdef const char* options_wkt[3] cdef bytes multiline = b"MULTILINE=NO" if pretty: multiline = b"MULTILINE=YES" cdef bytes output_axis = b"OUTPUT_AXIS=AUTO" if output_axis_rule is False: output_axis = b"OUTPUT_AXIS=NO" elif output_axis_rule is True: output_axis = b"OUTPUT_AXIS=YES" options_wkt[0] = multiline options_wkt[1] = output_axis options_wkt[2] = NULL cdef const char* proj_string proj_string = proj_as_wkt( context, projobj, wkt_out_type, options_wkt, ) _clear_proj_error() return cstrdecode(proj_string) cdef _to_proj4( PJ_CONTEXT* context, PJ* projobj, object version, bint pretty, ): """ Convert the projection to a PROJ string. Parameters ---------- context: PJ_CONTEXT* projobj: PJ* version: pyproj.enums.ProjVersion The version of the PROJ string output. pretty: bool Returns ------- str: The PROJ string. """ # get the output PROJ string format supported_prj_types = { ProjVersion.PROJ_4: PJ_PROJ_4, ProjVersion.PROJ_5: PJ_PROJ_5, } cdef PJ_PROJ_STRING_TYPE proj_out_type proj_out_type = supported_prj_types[ProjVersion.create(version)] cdef const char* options[2] cdef bytes multiline = b"MULTILINE=NO" if pretty: multiline = b"MULTILINE=YES" options[0] = multiline options[1] = NULL # convert projection to string cdef const char* proj_string proj_string = proj_as_proj_string( context, projobj, proj_out_type, options, ) _clear_proj_error() return cstrdecode(proj_string) cdef tuple _get_concatenated_operations( PJ_CONTEXT* context, PJ* concatenated_operation ): """ For a PJ* of type concatenated operation, get the operations """ cdef int step_count = proj_concatoperation_get_step_count( context, concatenated_operation, ) cdef PJ* operation = NULL cdef PJ_CONTEXT* sub_context = NULL cdef int iii = 0 operations = [] for iii in range(step_count): sub_context = pyproj_context_create() operation = proj_concatoperation_get_step( sub_context, concatenated_operation, iii, ) operations.append(CoordinateOperation.create(sub_context, operation)) _clear_proj_error() return tuple(operations) cdef PJ * _from_name( PJ_CONTEXT* context, str name_string, str auth_name, PJ_TYPE pj_type, ): """ Create an object from a name. Parameters ---------- context: PJ_CONTEXT* The context to use to create the object. name_string: str Name of object to create. auth_name: str The authority name to refine search. If None, will search all authorities. pj_type: PJ_TYPE The type of PJ * to create. 
Returns ------- PJ * """ cdef PJ_TYPE[1] pj_types = [pj_type] cdef char* c_auth_name = NULL cdef bytes b_auth_name if auth_name is not None: b_auth_name = cstrencode(auth_name) c_auth_name = b_auth_name cdef PJ_OBJ_LIST *pj_list = proj_create_from_name( context, c_auth_name, cstrencode(name_string), &pj_types, 1, False, 1, NULL, ) if pj_list == NULL or proj_list_get_count(pj_list) <= 0: proj_list_destroy(pj_list) return NULL cdef PJ* datum_pj = proj_list_get(context, pj_list, 0) proj_list_destroy(pj_list) return datum_pj def _load_proj_json(str in_proj_json): try: return json.loads(in_proj_json) except ValueError: raise CRSError("Invalid JSON") cdef class Axis: """ .. versionadded:: 2.0.0 Coordinate System Axis Attributes ---------- name: str abbrev: str direction: str unit_conversion_factor: float unit_name: str unit_auth_code: str unit_code: str """ def __cinit__(self): self.name = "undefined" self.abbrev = "undefined" self.direction = "undefined" self.unit_conversion_factor = float("NaN") self.unit_name = "undefined" self.unit_auth_code = "undefined" self.unit_code = "undefined" def __str__(self): return f"{self.abbrev}[{self.direction}]: {self.name} ({self.unit_name})" def __repr__(self): return ( f"Axis(name={self.name}, abbrev={self.abbrev}, " f"direction={self.direction}, unit_auth_code={self.unit_auth_code}, " f"unit_code={self.unit_code}, unit_name={self.unit_name})" ) @staticmethod cdef Axis create(PJ_CONTEXT* context, PJ* projobj, int index): cdef: Axis axis_info = Axis() const char * name = NULL const char * abbrev = NULL const char * direction = NULL const char * unit_name = NULL const char * unit_auth_code = NULL const char * unit_code = NULL if not proj_cs_get_axis_info( context, projobj, index, &name, &abbrev, &direction, &axis_info.unit_conversion_factor, &unit_name, &unit_auth_code, &unit_code): return None axis_info.name = decode_or_undefined(name) axis_info.abbrev = decode_or_undefined(abbrev) axis_info.direction = decode_or_undefined(direction) axis_info.unit_name = decode_or_undefined(unit_name) axis_info.unit_auth_code = decode_or_undefined(unit_auth_code) axis_info.unit_code = decode_or_undefined(unit_code) return axis_info cdef create_area_of_use(PJ_CONTEXT* context, PJ* projobj): cdef: double west = float("nan") double south = float("nan") double east = float("nan") double north = float("nan") const char * area_name = NULL if not proj_get_area_of_use( context, projobj, &west, &south, &east, &north, &area_name): return None return AreaOfUse( west=west, south=south, east=east, north=north, name=decode_or_undefined(area_name), ) cdef class Base: def __cinit__(self): self.projobj = NULL self.context = NULL self.name = "undefined" self._scope = None self._remarks = None def __dealloc__(self): """destroy projection definition""" if self.projobj != NULL: proj_destroy(self.projobj) cdef _set_base_info(self): """ Set the name of the PJ """ # get proj information cdef const char* proj_name = proj_get_name(self.projobj) self.name = decode_or_undefined(proj_name) cdef const char* scope = proj_get_scope(self.projobj) if scope != NULL and scope != "": self._scope = scope cdef const char* remarks = proj_get_remarks(self.projobj) if remarks != NULL and remarks != "": self._remarks = remarks @property def remarks(self): """ .. versionadded:: 2.4.0 Returns ------- str: Remarks about object. """ return self._remarks @property def scope(self): """ .. versionadded:: 2.4.0 Returns ------- str: Scope of object. 
""" return self._scope def to_wkt(self, version=WktVersion.WKT2_2019, pretty=False, output_axis_rule=None): """ Convert the projection to a WKT string. Version options: - WKT2_2015 - WKT2_2015_SIMPLIFIED - WKT2_2019 - WKT2_2019_SIMPLIFIED - WKT1_GDAL - WKT1_ESRI .. versionadded:: 3.6.0 output_axis_rule Parameters ---------- version: pyproj.enums.WktVersion, default=pyproj.enums.WktVersion.WKT2_2019 The version of the WKT output. pretty: bool, default=False If True, it will set the output to be a multiline string. output_axis_rule: bool, optional, default=None If True, it will set the axis rule on any case. If false, never. None for AUTO, that depends on the CRS and version. Returns ------- str """ return _to_wkt(self.context, self.projobj, version, pretty=pretty, output_axis_rule=output_axis_rule) def to_json(self, bint pretty=False, int indentation=2): """ .. versionadded:: 2.4.0 Convert the object to a JSON string. Parameters ---------- pretty: bool, default=False If True, it will set the output to be a multiline string. indentation: int, default=2 If pretty is True, it will set the width of the indentation. Returns ------- str """ cdef const char* options[3] multiline = b"MULTILINE=NO" if pretty: multiline = b"MULTILINE=YES" indentation_width = cstrencode(f"INDENTATION_WIDTH={indentation:.0f}") options[0] = multiline options[1] = indentation_width options[2] = NULL cdef const char* proj_json_string = proj_as_projjson( self.context, self.projobj, options, ) return cstrdecode(proj_json_string) def to_json_dict(self): """ .. versionadded:: 2.4.0 Convert the object to a JSON dictionary. Returns ------- dict """ return json.loads(self.to_json()) def __str__(self): return self.name def __repr__(self): return self.to_wkt(pretty=True) def _is_exact_same(self, Base other): return proj_is_equivalent_to_with_ctx( self.context, self.projobj, other.projobj, PJ_COMP_STRICT) == 1 def _is_equivalent(self, Base other): return proj_is_equivalent_to_with_ctx( self.context, self.projobj, other.projobj, PJ_COMP_EQUIVALENT) == 1 def __eq__(self, other): if not isinstance(other, Base): return False return self._is_equivalent(other) def is_exact_same(self, other): """Compares projection objects to see if they are exactly the same.""" if not isinstance(other, Base): return False return self._is_exact_same(other) cdef class _CRSParts(Base): @classmethod def from_user_input(cls, user_input): """ .. versionadded:: 2.5.0 Create cls from user input: - PROJ JSON string - PROJ JSON dict - WKT string - An authority string - An EPSG integer code - An iterable of ("auth_name", "auth_code") - An object with a `to_json` method. Parameters ---------- user_input: str, dict, int, Iterable[str, str] Input to create cls. 
Returns ------- cls """ if isinstance(user_input, str): prepared = cls.from_string(user_input) elif isinstance(user_input, dict): prepared = cls.from_json_dict(user_input) elif isinstance(user_input, int) and hasattr(cls, "from_epsg"): prepared = cls.from_epsg(user_input) elif ( isinstance(user_input, (list, tuple)) and len(user_input) == 2 and hasattr(cls, "from_authority") ): prepared = cls.from_authority(*user_input) elif hasattr(user_input, "to_json"): prepared = cls.from_json(user_input.to_json()) else: raise CRSError(f"Invalid {cls.__name__} input: {user_input!r}") return prepared def __eq__(self, other): try: other = self.from_user_input(other) except CRSError: return False return self._is_equivalent(other) cdef dict _COORD_SYSTEM_TYPE_MAP = { PJ_CS_TYPE_UNKNOWN: "unknown", PJ_CS_TYPE_CARTESIAN: "cartesian", PJ_CS_TYPE_ELLIPSOIDAL: "ellipsoidal", PJ_CS_TYPE_VERTICAL: "vertical", PJ_CS_TYPE_SPHERICAL: "spherical", PJ_CS_TYPE_ORDINAL: "ordinal", PJ_CS_TYPE_PARAMETRIC: "parametric", PJ_CS_TYPE_DATETIMETEMPORAL: "datetimetemporal", PJ_CS_TYPE_TEMPORALCOUNT: "temporalcount", PJ_CS_TYPE_TEMPORALMEASURE: "temporalmeasure", } cdef class CoordinateSystem(_CRSParts): """ .. versionadded:: 2.2.0 Coordinate System for CRS Attributes ---------- name: str The name of the coordinate system. """ def __cinit__(self): self._axis_list = None def __init__(self): raise RuntimeError("CoordinateSystem is not initializable.") @staticmethod cdef CoordinateSystem create(PJ_CONTEXT* context, PJ* coord_system_pj): cdef CoordinateSystem coord_system = CoordinateSystem.__new__(CoordinateSystem) coord_system.context = context coord_system._context_manager = get_context_manager() coord_system.projobj = coord_system_pj cdef PJ_COORDINATE_SYSTEM_TYPE cs_type = proj_cs_get_type( coord_system.context, coord_system.projobj, ) coord_system.name = _COORD_SYSTEM_TYPE_MAP[cs_type] return coord_system @property def axis_list(self): """ Returns ------- list[Axis]: The Axis list for the coordinate system. """ if self._axis_list is not None: return self._axis_list self._axis_list = [] cdef int num_axes = 0 num_axes = proj_cs_get_axis_count( self.context, self.projobj ) for axis_idx from 0 <= axis_idx < num_axes: self._axis_list.append( Axis.create( self.context, self.projobj, axis_idx ) ) return self._axis_list @staticmethod def from_string(str coordinate_system_string not None): """ .. versionadded:: 2.5.0 .. note:: Only works with PROJ JSON. Create a Coordinate System from a string. Parameters ---------- coordinate_system_string: str Coordinate System string. Returns ------- CoordinateSystem """ cdef PJ_CONTEXT* context = pyproj_context_create() cdef PJ* coordinate_system_pj = proj_create( context, cstrencode(coordinate_system_string) ) if coordinate_system_pj == NULL or proj_cs_get_type( context, coordinate_system_pj, ) == PJ_CS_TYPE_UNKNOWN: proj_destroy(coordinate_system_pj) raise CRSError( "Invalid coordinate system string: " f"{coordinate_system_string}" ) _clear_proj_error() return CoordinateSystem.create(context, coordinate_system_pj) @staticmethod def from_json_dict(dict coordinate_system_dict not None): """ .. versionadded:: 2.5.0 Create Coordinate System from a JSON dictionary. Parameters ---------- coordinate_system_dict: str Coordinate System dictionary. Returns ------- CoordinateSystem """ return CoordinateSystem.from_string( json.dumps(coordinate_system_dict, cls=NumpyEncoder) ) @staticmethod def from_json(str coordinate_system_json_str not None): """ .. 
versionadded:: 2.5.0 Create Coordinate System from a JSON string. Parameters ---------- coordinate_system_json_str: str Coordinate System JSON string. Returns ------- CoordinateSystem """ return CoordinateSystem.from_json_dict( _load_proj_json(coordinate_system_json_str) ) def to_cf(self, bint rotated_pole=False): """ .. versionadded:: 3.0.0 This converts a :obj:`pyproj.crs.CoordinateSystem` axis to a list of Climate and Forecast (CF) Version 1.8 dicts. Parameters ---------- rotated_pole: bool, default=False If True, the geographic coordinates are on a rotated pole grid. This corresponds to the rotated_latitude_longitude grid_mapping_name. Returns ------- list[dict]: CF-1.8 version of the CoordinateSystem. """ axis_list = self.to_json_dict()["axis"] cf_params = [] def get_linear_unit(axis): try: return f'{axis["unit"]["conversion_factor"]} metre' except TypeError: return axis["unit"] if self.name == "cartesian": for axis in axis_list: if axis["name"].lower() == "easting": cf_axis = "X" else: cf_axis = "Y" cf_params.append(dict( axis=cf_axis, long_name=axis["name"], standard_name=f"projection_{cf_axis.lower()}_coordinate", units=get_linear_unit(axis), )) elif self.name == "ellipsoidal": for axis in axis_list: if axis["abbreviation"].upper() in ("D", "H"): cf_params.append(dict( standard_name="height_above_reference_ellipsoid", long_name=axis["name"], units=axis["unit"], positive=axis["direction"], axis="Z", )) else: if "longitude" in axis["name"].lower(): cf_axis = "X" name = "longitude" else: cf_axis = "Y" name = "latitude" if rotated_pole: cf_params.append(dict( standard_name=f"grid_{name}", long_name=f"{name} in rotated pole grid", units="degrees", axis=cf_axis, )) else: cf_params.append(dict( standard_name=name, long_name=f"{name} coordinate", units=f'degrees_{axis["direction"]}', axis=cf_axis, )) elif self.name == "vertical": for axis in axis_list: cf_params.append(dict( standard_name="height_above_reference_ellipsoid", long_name=axis["name"], units=get_linear_unit(axis), positive=axis["direction"], axis="Z", )) return cf_params cdef class Ellipsoid(_CRSParts): """ .. versionadded:: 2.0.0 Ellipsoid for CRS Attributes ---------- name: str The name of the ellipsoid. is_semi_minor_computed: int 1 if True, 0 if False semi_major_metre: float The semi major axis in meters of the ellipsoid. semi_minor_metre: float The semi minor axis in meters of the ellipsoid. inverse_flattening: float The inverse flattening of the ellipsoid. """ def __cinit__(self): # load in ellipsoid information if applicable self.semi_major_metre = float("NaN") self.semi_minor_metre = float("NaN") self.is_semi_minor_computed = False self.inverse_flattening = float("NaN") def __init__(self): raise RuntimeError( "Ellipsoid can only be initialized like 'Ellipsoid.from_*()'." ) @staticmethod cdef Ellipsoid create(PJ_CONTEXT* context, PJ* ellipsoid_pj): cdef Ellipsoid ellips = Ellipsoid.__new__(Ellipsoid) ellips.context = context ellips._context_manager = get_context_manager() ellips.projobj = ellipsoid_pj cdef int is_semi_minor_computed = 0 proj_ellipsoid_get_parameters( context, ellipsoid_pj, &ellips.semi_major_metre, &ellips.semi_minor_metre, &is_semi_minor_computed, &ellips.inverse_flattening, ) ellips.is_semi_minor_computed = is_semi_minor_computed == 1 ellips._set_base_info() _clear_proj_error() return ellips @staticmethod def from_authority(str auth_name not None, code not None): """ .. versionadded:: 2.2.0 Create an Ellipsoid from an authority code. Parameters ---------- auth_name: str Name of the authority. 
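# Illustrative sketch of the to_cf() conversion defined above: each returned dict
# carries CF-1.8 keys such as standard_name, long_name, units, and axis.
from pyproj import CRS

cf_axes = CRS.from_epsg(4326).coordinate_system.to_cf()
for entry in cf_axes:
    print(entry["standard_name"], entry["axis"], entry["units"])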
code: str or int The code used by the authority. Returns ------- Ellipsoid """ cdef PJ_CONTEXT* context = pyproj_context_create() cdef PJ* ellipsoid_pj = proj_create_from_database( context, cstrencode(auth_name), cstrencode(str(code)), PJ_CATEGORY_ELLIPSOID, False, NULL, ) if ellipsoid_pj == NULL: raise CRSError(f"Invalid authority or code ({auth_name}, {code})") _clear_proj_error() return Ellipsoid.create(context, ellipsoid_pj) @staticmethod def from_epsg(code not None): """ .. versionadded:: 2.2.0 Create an Ellipsoid from an EPSG code. Parameters ---------- code: str or int The code used by the EPSG. Returns ------- Ellipsoid """ return Ellipsoid.from_authority("EPSG", code) @staticmethod def _from_string(str ellipsoid_string not None): """ Create an Ellipsoid from a string. Examples: - urn:ogc:def:ellipsoid:EPSG::7001 - ELLIPSOID["Airy 1830",6377563.396,299.3249646, LENGTHUNIT["metre",1], ID["EPSG",7001]] Parameters ---------- ellipsoid_string: str Ellipsoid string. Returns ------- Ellipsoid """ cdef PJ_CONTEXT* context = pyproj_context_create() cdef PJ* ellipsoid_pj = proj_create( context, cstrencode(ellipsoid_string) ) if ellipsoid_pj == NULL or proj_get_type(ellipsoid_pj) != PJ_TYPE_ELLIPSOID: proj_destroy(ellipsoid_pj) raise CRSError( f"Invalid ellipsoid string: {ellipsoid_string}" ) _clear_proj_error() return Ellipsoid.create(context, ellipsoid_pj) @staticmethod def from_string(str ellipsoid_string not None): """ .. versionadded:: 2.2.0 Create an Ellipsoid from a string. Examples: - urn:ogc:def:ellipsoid:EPSG::7001 - ELLIPSOID["Airy 1830",6377563.396,299.3249646, LENGTHUNIT["metre",1], ID["EPSG",7001]] - WGS 84 Parameters ---------- ellipsoid_string: str Ellipsoid string. Returns ------- Ellipsoid """ try: return Ellipsoid._from_string(ellipsoid_string) except CRSError as crs_err: try: return Ellipsoid.from_name(ellipsoid_string) except CRSError: raise crs_err @staticmethod def from_json_dict(dict ellipsoid_dict not None): """ .. versionadded:: 2.4.0 Create Ellipsoid from a JSON dictionary. Parameters ---------- ellipsoid_dict: str Ellipsoid dictionary. Returns ------- Ellipsoid """ return Ellipsoid._from_string(json.dumps(ellipsoid_dict, cls=NumpyEncoder)) @staticmethod def from_json(str ellipsoid_json_str not None): """ .. versionadded:: 2.4.0 Create Ellipsoid from a JSON string. Parameters ---------- ellipsoid_json_str: str Ellipsoid JSON string. Returns ------- Ellipsoid """ return Ellipsoid.from_json_dict(_load_proj_json(ellipsoid_json_str)) @staticmethod def _from_name( str ellipsoid_name, str auth_name, ): """ .. versionadded:: 2.5.0 Create a Ellipsoid from a name. Parameters ---------- ellipsoid_name: str Ellipsoid name. auth_name: str The authority name to refine search (e.g. 'EPSG'). If None, will search all authorities. Returns ------- Ellipsoid """ cdef PJ_CONTEXT* context = pyproj_context_create() cdef PJ* ellipsoid_pj = _from_name( context, ellipsoid_name, auth_name, PJ_TYPE_ELLIPSOID, ) if ellipsoid_pj == NULL: raise CRSError(f"Invalid ellipsoid name: {ellipsoid_name}") _clear_proj_error() return Ellipsoid.create(context, ellipsoid_pj) @staticmethod def from_name( str ellipsoid_name not None, str auth_name=None, ): """ .. versionadded:: 2.5.0 Create a Ellipsoid from a name. Examples: - WGS 84 Parameters ---------- ellipsoid_name: str Ellipsoid name. auth_name: str, optional The authority name to refine search (e.g. 'EPSG'). If None, will search all authorities. 
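# Illustrative sketch of the Ellipsoid constructors above. The import path and the
# EPSG code 7030 ("WGS 84" ellipsoid) are assumptions; EPSG:7001 comes from the docstring.
from pyproj.crs import Ellipsoid  # assumed public re-export location

wgs84_ell = Ellipsoid.from_epsg(7030)
airy = Ellipsoid.from_string("urn:ogc:def:ellipsoid:EPSG::7001")
print(wgs84_ell.semi_major_metre, wgs84_ell.inverse_flattening)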
Returns ------- Ellipsoid """ try: return Ellipsoid._from_name( ellipsoid_name=ellipsoid_name, auth_name=auth_name, ) except CRSError: if auth_name not in ("PROJ", None): raise pass # add support for past names for PROJ ellipsoids try: ellipsoid_params = pj_ellps[ _PJ_ELLPS_NAME_MAP.get(ellipsoid_name, ellipsoid_name) ] except KeyError: raise CRSError(f"Invalid ellipsoid name: {ellipsoid_name}") return CustomEllipsoid( name=ellipsoid_params["description"], semi_major_axis=ellipsoid_params["a"], semi_minor_axis=ellipsoid_params.get("b"), inverse_flattening=ellipsoid_params.get("rf"), ) cdef class PrimeMeridian(_CRSParts): """ .. versionadded:: 2.0.0 Prime Meridian for CRS Attributes ---------- name: str The name of the prime meridian. unit_name: str The unit name for the prime meridian. """ def __cinit__(self): self.unit_name = None def __init__(self): raise RuntimeError( "PrimeMeridian can only be initialized like 'PrimeMeridian.from_*()'." ) @staticmethod cdef PrimeMeridian create(PJ_CONTEXT* context, PJ* prime_meridian_pj): cdef PrimeMeridian prime_meridian = PrimeMeridian.__new__(PrimeMeridian) prime_meridian.context = context prime_meridian._context_manager = get_context_manager() prime_meridian.projobj = prime_meridian_pj cdef const char * unit_name proj_prime_meridian_get_parameters( prime_meridian.context, prime_meridian.projobj, &prime_meridian.longitude, &prime_meridian.unit_conversion_factor, &unit_name, ) prime_meridian.unit_name = decode_or_undefined(unit_name) prime_meridian._set_base_info() _clear_proj_error() return prime_meridian @staticmethod def from_authority(str auth_name not None, code not None): """ .. versionadded:: 2.2.0 Create a PrimeMeridian from an authority code. Parameters ---------- auth_name: str Name of the authority. code: str or int The code used by the authority. Returns ------- PrimeMeridian """ cdef PJ_CONTEXT* context = pyproj_context_create() cdef PJ* prime_meridian_pj = proj_create_from_database( context, cstrencode(auth_name), cstrencode(str(code)), PJ_CATEGORY_PRIME_MERIDIAN, False, NULL, ) if prime_meridian_pj == NULL: raise CRSError(f"Invalid authority or code ({auth_name}, {code})") _clear_proj_error() return PrimeMeridian.create(context, prime_meridian_pj) @staticmethod def from_epsg(code not None): """ .. versionadded:: 2.2.0 Create a PrimeMeridian from an EPSG code. Parameters ---------- code: str or int The code used by EPSG. Returns ------- PrimeMeridian """ return PrimeMeridian.from_authority("EPSG", code) @staticmethod def _from_string(str prime_meridian_string not None): """ Create an PrimeMeridian from a string. Examples: - urn:ogc:def:meridian:EPSG::8901 - PRIMEM["Greenwich",0, ANGLEUNIT["degree",0.0174532925199433], ID["EPSG",8901]] Parameters ---------- prime_meridian_string: str prime meridian string. Returns ------- PrimeMeridian """ cdef PJ_CONTEXT* context = pyproj_context_create() cdef PJ* prime_meridian_pj = proj_create( context, cstrencode(prime_meridian_string) ) if ( prime_meridian_pj == NULL or proj_get_type(prime_meridian_pj) != PJ_TYPE_PRIME_MERIDIAN ): proj_destroy(prime_meridian_pj) raise CRSError( f"Invalid prime meridian string: {prime_meridian_string}" ) _clear_proj_error() return PrimeMeridian.create(context, prime_meridian_pj) @staticmethod def from_string(str prime_meridian_string not None): """ .. versionadded:: 2.2.0 Create an PrimeMeridian from a string. 
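# Illustrative sketch of the from_name() fallback implemented above: the PROJ database is
# searched first, then the legacy pj_ellps table. "clrk66" as a legacy name is an assumption.
from pyproj.crs import Ellipsoid, PrimeMeridian  # assumed public re-export location

db_hit = Ellipsoid.from_name("WGS 84")       # resolved from the PROJ database
legacy = Ellipsoid.from_name("clrk66")       # assumed pj_ellps name -> CustomEllipsoid
greenwich = PrimeMeridian.from_name("Greenwich")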
Examples: - urn:ogc:def:meridian:EPSG::8901 - PRIMEM["Greenwich",0, ANGLEUNIT["degree",0.0174532925199433], ID["EPSG",8901]] - Greenwich Parameters ---------- prime_meridian_string: str prime meridian string. Returns ------- PrimeMeridian """ try: return PrimeMeridian._from_string(prime_meridian_string) except CRSError as crs_err: try: return PrimeMeridian.from_name(prime_meridian_string) except CRSError: raise crs_err @staticmethod def from_json_dict(dict prime_meridian_dict not None): """ .. versionadded:: 2.4.0 Create PrimeMeridian from a JSON dictionary. Parameters ---------- prime_meridian_dict: str PrimeMeridian dictionary. Returns ------- PrimeMeridian """ return PrimeMeridian._from_string( json.dumps(prime_meridian_dict, cls=NumpyEncoder) ) @staticmethod def from_json(str prime_meridian_json_str not None): """ .. versionadded:: 2.4.0 Create PrimeMeridian from a JSON string. Parameters ---------- prime_meridian_json_str: str PrimeMeridian JSON string. Returns ------- PrimeMeridian """ return PrimeMeridian.from_json_dict(_load_proj_json(prime_meridian_json_str)) @staticmethod def from_name( str prime_meridian_name not None, str auth_name=None, ): """ .. versionadded:: 2.5.0 Create a Prime Meridian from a name. Examples: - Greenwich Parameters ---------- prime_meridian_name: str Prime Meridian name. auth_name: str, optional The authority name to refine search (e.g. 'EPSG'). If None, will search all authorities. Returns ------- PrimeMeridian """ cdef PJ_CONTEXT* context = pyproj_context_create() cdef PJ* prime_meridian_pj = _from_name( context, prime_meridian_name, auth_name, PJ_TYPE_PRIME_MERIDIAN, ) if prime_meridian_pj == NULL: raise CRSError( f"Invalid prime meridian name: {prime_meridian_name}" ) _clear_proj_error() return PrimeMeridian.create(context, prime_meridian_pj) cdef dict _DATUM_TYPE_MAP = { PJ_TYPE_GEODETIC_REFERENCE_FRAME: "Geodetic Reference Frame", PJ_TYPE_DYNAMIC_GEODETIC_REFERENCE_FRAME: "Dynamic Geodetic Reference Frame", PJ_TYPE_VERTICAL_REFERENCE_FRAME: "Vertical Reference Frame", PJ_TYPE_DYNAMIC_VERTICAL_REFERENCE_FRAME: "Dynamic Vertical Reference Frame", PJ_TYPE_DATUM_ENSEMBLE: "Datum Ensemble", PJ_TYPE_TEMPORAL_DATUM: "Temporal Datum", PJ_TYPE_ENGINEERING_DATUM: "Engineering Datum", PJ_TYPE_PARAMETRIC_DATUM: "Parametric Datum", } cdef dict _PJ_DATUM_TYPE_MAP = { DatumType.DATUM_ENSEMBLE: PJ_TYPE_DATUM_ENSEMBLE, DatumType.GEODETIC_REFERENCE_FRAME: PJ_TYPE_GEODETIC_REFERENCE_FRAME, DatumType.DYNAMIC_GEODETIC_REFERENCE_FRAME: PJ_TYPE_DYNAMIC_GEODETIC_REFERENCE_FRAME, DatumType.VERTICAL_REFERENCE_FRAME: PJ_TYPE_VERTICAL_REFERENCE_FRAME, DatumType.DYNAMIC_VERTICAL_REFERENCE_FRAME: PJ_TYPE_DYNAMIC_VERTICAL_REFERENCE_FRAME, } cdef class Datum(_CRSParts): """ .. versionadded:: 2.2.0 Datum for CRS. If it is a compound CRS it is the horizontal datum. Attributes ---------- name: str The name of the datum. """ def __cinit__(self): self._ellipsoid = None self._prime_meridian = None def __init__(self): raise RuntimeError( "Datum can only be initialized like 'Datum.from_*()'." ) @staticmethod cdef Datum create(PJ_CONTEXT* context, PJ* datum_pj): cdef Datum datum = Datum.__new__(Datum) datum.context = context datum._context_manager = get_context_manager() datum.projobj = datum_pj datum._set_base_info() datum.type_name = _DATUM_TYPE_MAP[proj_get_type(datum.projobj)] return datum @staticmethod def _from_authority(str auth_name not None, code not None, PJ_CATEGORY category): """ Create a Datum from an authority code. Parameters ---------- auth_name: str Name of the authority. 
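# Illustrative sketch of the Datum lookup below: the datum-ensemble category is tried
# first, then plain datums. EPSG:6326 is taken from the docstring examples; the
# reported type_name depends on the PROJ database version.
from pyproj.crs import Datum  # assumed public re-export location

wgs84_datum = Datum.from_epsg(6326)
print(wgs84_datum.name)        # "World Geodetic System 1984"
print(wgs84_datum.type_name)   # e.g. "Datum Ensemble" on recent databases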
code: str or int The code used by the authority. Returns ------- Datum """ cdef PJ_CONTEXT* context = pyproj_context_create() cdef PJ* datum_pj = proj_create_from_database( context, cstrencode(auth_name), cstrencode(str(code)), category, False, NULL, ) if datum_pj == NULL: raise CRSError(f"Invalid authority or code ({auth_name}, {code})") _clear_proj_error() return Datum.create(context, datum_pj) @staticmethod def from_authority(str auth_name not None, code not None): """ Create a Datum from an authority code. Parameters ---------- auth_name: str Name of the authority. code: str or int The code used by the authority. Returns ------- Datum """ try: return Datum._from_authority(auth_name, code, PJ_CATEGORY_DATUM_ENSEMBLE) except CRSError: return Datum._from_authority(auth_name, code, PJ_CATEGORY_DATUM) @staticmethod def from_epsg(code not None): """ Create a Datum from an EPSG code. Parameters ---------- code: str or int The code used by EPSG. Returns ------- Datum """ return Datum.from_authority("EPSG", code) @staticmethod def _from_string(str datum_string not None): """ Create a Datum from a string. Examples: - urn:ogc:def:datum:EPSG::6326 - DATUM["World Geodetic System 1984", ELLIPSOID["WGS 84",6378137,298.257223563, LENGTHUNIT["metre",1]], ID["EPSG",6326]] Parameters ---------- datum_string: str Datum string. Returns ------- Datum """ cdef PJ_CONTEXT* context = pyproj_context_create() cdef PJ* datum_pj = proj_create( context, cstrencode(datum_string) ) if ( datum_pj == NULL or proj_get_type(datum_pj) not in _DATUM_TYPE_MAP ): proj_destroy(datum_pj) raise CRSError(f"Invalid datum string: {datum_string}") _clear_proj_error() return Datum.create(context, datum_pj) @staticmethod def from_string(str datum_string not None): """ Create a Datum from a string. Examples: - urn:ogc:def:datum:EPSG::6326 - DATUM["World Geodetic System 1984", ELLIPSOID["WGS 84",6378137,298.257223563, LENGTHUNIT["metre",1]], ID["EPSG",6326]] - World Geodetic System 1984 Parameters ---------- datum_string: str Datum string. Returns ------- Datum """ try: return Datum._from_string(datum_string) except CRSError as crs_err: try: return Datum.from_name(datum_string) except CRSError: raise crs_err @staticmethod def _from_name( str datum_name, str auth_name, object datum_type, ): """ .. versionadded:: 2.5.0 Create a Datum from a name. Parameters ---------- datum_name: str Datum name. auth_name: str The authority name to refine search (e.g. 'EPSG'). If None, will search all authorities. datum_type: DatumType The datum type to create. Returns ------- Datum """ pj_datum_type = _PJ_DATUM_TYPE_MAP[datum_type] cdef PJ_CONTEXT* context = pyproj_context_create() cdef PJ* datum_pj = _from_name( context, datum_name, auth_name, pj_datum_type, ) if datum_pj == NULL: raise CRSError(f"Invalid datum name: {datum_name}") _clear_proj_error() return Datum.create(context, datum_pj) @staticmethod def from_name( str datum_name not None, str auth_name=None, datum_type=None, ): """ .. versionadded:: 2.5.0 Create a Datum from a name. Examples: - WGS 84 - World Geodetic System 1984 Parameters ---------- datum_name: str Datum name. auth_name: str, optional The authority name to refine search (e.g. 'EPSG'). If None, will search all authorities. datum_type: DatumType, optional The datum type to create. If it is None, it uses any datum type. 
Returns ------- Datum """ if datum_type is None: # try creating name from all datum types first_error = None for datum_type in _PJ_DATUM_TYPE_MAP: try: return Datum.from_name( datum_name=datum_name, auth_name=auth_name, datum_type=datum_type, ) except CRSError as err: if first_error is None: first_error = err raise first_error datum_type = DatumType.create(datum_type) return Datum._from_name( datum_name=datum_name, auth_name=auth_name, datum_type=datum_type, ) @staticmethod def from_json_dict(dict datum_dict not None): """ .. versionadded:: 2.4.0 Create Datum from a JSON dictionary. Parameters ---------- datum_dict: str Datum dictionary. Returns ------- Datum """ return Datum._from_string(json.dumps(datum_dict, cls=NumpyEncoder)) @staticmethod def from_json(str datum_json_str not None): """ .. versionadded:: 2.4.0 Create Datum from a JSON string. Parameters ---------- datum_json_str: str Datum JSON string. Returns ------- Datum """ return Datum.from_json_dict(_load_proj_json(datum_json_str)) @property def ellipsoid(self): """ Returns ------- Ellipsoid: The ellipsoid object with associated attributes. """ if self._ellipsoid is not None: return None if self._ellipsoid is False else self._ellipsoid cdef PJ_CONTEXT* context = pyproj_context_create() cdef PJ* ellipsoid_pj = proj_get_ellipsoid( context, self.projobj, ) _clear_proj_error() if ellipsoid_pj == NULL: self._ellipsoid = False return None self._ellipsoid = Ellipsoid.create(context, ellipsoid_pj) return self._ellipsoid @property def prime_meridian(self): """ Returns ------- PrimeMeridian: The CRS prime meridian object with associated attributes. """ if self._prime_meridian is not None: return None if self._prime_meridian is False else self._prime_meridian cdef PJ_CONTEXT* context = pyproj_context_create() cdef PJ* prime_meridian_pj = proj_get_prime_meridian( context, self.projobj, ) _clear_proj_error() if prime_meridian_pj == NULL: self._prime_meridian = False return None self._prime_meridian = PrimeMeridian.create( context, prime_meridian_pj, ) return self._prime_meridian cdef class Param: """ .. versionadded:: 2.2.0 Coordinate operation parameter. Attributes ---------- name: str The name of the parameter. auth_name: str The authority name of the parameter (i.e. EPSG). code: str The code of the parameter (i.e. 9807). value: str or double The value of the parameter. unit_conversion_factor: double The factor to convert to meters. unit_name: str The name of the unit. unit_auth_name: str The authority name of the unit (i.e. EPSG). unit_code: str The code of the unit (i.e. 9807). unit_category: str The category of the unit (“unknown”, “none”, “linear”, “angular”, “scale”, “time” or “parametric”). 
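# Illustrative sketch of the lazily-computed ellipsoid/prime_meridian properties above,
# accessed through the datum of a pyproj.CRS.
from pyproj import CRS

datum = CRS.from_epsg(4326).datum
print(datum.ellipsoid.semi_major_metre)   # ~6378137.0 for WGS 84
print(datum.prime_meridian.longitude)     # 0.0 (Greenwich)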
""" def __cinit__(self): self.name = "undefined" self.auth_name = "undefined" self.code = "undefined" self.value = "undefined" self.unit_conversion_factor = float("nan") self.unit_name = "undefined" self.unit_auth_name = "undefined" self.unit_code = "undefined" self.unit_category = "undefined" @staticmethod cdef Param create(PJ_CONTEXT* context, PJ* projobj, int param_idx): cdef: Param param = Param() const char *out_name const char *out_auth_name const char *out_code const char *out_value const char *out_value_string const char *out_unit_name const char *out_unit_auth_name const char *out_unit_code const char *out_unit_category double value_double proj_coordoperation_get_param( context, projobj, param_idx, &out_name, &out_auth_name, &out_code, &value_double, &out_value_string, ¶m.unit_conversion_factor, &out_unit_name, &out_unit_auth_name, &out_unit_code, &out_unit_category ) param.name = decode_or_undefined(out_name) param.auth_name = decode_or_undefined(out_auth_name) param.code = decode_or_undefined(out_code) param.unit_name = decode_or_undefined(out_unit_name) param.unit_auth_name = decode_or_undefined(out_unit_auth_name) param.unit_code = decode_or_undefined(out_unit_code) param.unit_category = decode_or_undefined(out_unit_category) value_string = cstrdecode(out_value_string) param.value = value_double if value_string is None else value_string return param def __str__(self): return f"{self.auth_name}:{self.auth_code}" def __repr__(self): return ( f"Param(name={self.name}, auth_name={self.auth_name}, code={self.code}, " f"value={self.value}, unit_name={self.unit_name}, " f"unit_auth_name={self.unit_auth_name}, unit_code={self.unit_code}, " f"unit_category={self.unit_category})" ) cdef class Grid: """ .. versionadded:: 2.2.0 Coordinate operation grid. Attributes ---------- short_name: str The short name of the grid. full_name: str The full name of the grid. package_name: str The package name where the grid might be found. url: str The grid URL or the package URL where the grid might be found. direct_download: int If 1, *url* can be downloaded directly. open_license: int If 1, the grid is released with an open license. available: int If 1, the grid is available at runtime. 
""" def __cinit__(self): self.short_name = "undefined" self.full_name = "undefined" self.package_name = "undefined" self.url = "undefined" self.direct_download = False self.open_license = False self.available = False @staticmethod cdef Grid create(PJ_CONTEXT* context, PJ* projobj, int grid_idx): cdef: Grid grid = Grid() const char *out_short_name const char *out_full_name const char *out_package_name const char *out_url int direct_download = 0 int open_license = 0 int available = 0 proj_coordoperation_get_grid_used( context, projobj, grid_idx, &out_short_name, &out_full_name, &out_package_name, &out_url, &direct_download, &open_license, &available ) grid.short_name = decode_or_undefined(out_short_name) grid.full_name = decode_or_undefined(out_full_name) grid.package_name = decode_or_undefined(out_package_name) grid.url = decode_or_undefined(out_url) grid.direct_download = direct_download == 1 grid.open_license = open_license == 1 grid.available = available == 1 _clear_proj_error() return grid def __str__(self): return self.full_name def __repr__(self): return ( f"Grid(short_name={self.short_name}, full_name={self.full_name}, " f"package_name={self.package_name}, url={self.url}, " f"direct_download={self.direct_download}, " f"open_license={self.open_license}, available={self.available})" ) cdef dict _COORDINATE_OPERATION_TYPE_MAP = { PJ_TYPE_UNKNOWN: "Unknown", PJ_TYPE_CONVERSION: "Conversion", PJ_TYPE_TRANSFORMATION: "Transformation", PJ_TYPE_CONCATENATED_OPERATION: "Concatenated Operation", PJ_TYPE_OTHER_COORDINATE_OPERATION: "Other Coordinate Operation", } cdef dict _PJ_COORDINATE_OPERATION_TYPE_MAP = { CoordinateOperationType.CONVERSION: PJ_TYPE_CONVERSION, CoordinateOperationType.TRANSFORMATION: PJ_TYPE_TRANSFORMATION, CoordinateOperationType.CONCATENATED_OPERATION: PJ_TYPE_CONCATENATED_OPERATION, CoordinateOperationType.OTHER_COORDINATE_OPERATION: PJ_TYPE_OTHER_COORDINATE_OPERATION, } cdef class CoordinateOperation(_CRSParts): """ .. versionadded:: 2.2.0 Coordinate operation for CRS. Attributes ---------- name: str The name of the method(projection) with authority information. method_name: str The method (projection) name. method_auth_name: str The method authority name. method_code: str The method code. is_instantiable: int If 1, a coordinate operation can be instantiated as a PROJ pipeline. This also checks that referenced grids are available. has_ballpark_transformation: int If 1, the coordinate operation has a “ballpark” transformation, that is a very approximate one, due to lack of more accurate transformations. accuracy: float The accuracy (in metre) of a coordinate operation. """ def __cinit__(self): self._params = None self._grids = None self._area_of_use = None self.method_name = "undefined" self.method_auth_name = "undefined" self.method_code = "undefined" self.is_instantiable = False self.has_ballpark_transformation = False self.accuracy = float("nan") self._towgs84 = None self._operations = None def __init__(self): raise RuntimeError( "CoordinateOperation can only be initialized like " "CoordinateOperation.from_*()'." 
) @staticmethod cdef CoordinateOperation create(PJ_CONTEXT* context, PJ* coord_operation_pj): cdef CoordinateOperation coord_operation = CoordinateOperation.__new__( CoordinateOperation ) coord_operation.context = context coord_operation._context_manager = get_context_manager() coord_operation.projobj = coord_operation_pj cdef const char *out_method_name = NULL cdef const char *out_method_auth_name = NULL cdef const char *out_method_code = NULL proj_coordoperation_get_method_info( coord_operation.context, coord_operation.projobj, &out_method_name, &out_method_auth_name, &out_method_code ) coord_operation._set_base_info() coord_operation.method_name = decode_or_undefined(out_method_name) coord_operation.method_auth_name = decode_or_undefined(out_method_auth_name) coord_operation.method_code = decode_or_undefined(out_method_code) coord_operation.accuracy = proj_coordoperation_get_accuracy( coord_operation.context, coord_operation.projobj ) coord_operation.is_instantiable = proj_coordoperation_is_instantiable( coord_operation.context, coord_operation.projobj ) == 1 coord_operation.has_ballpark_transformation = \ proj_coordoperation_has_ballpark_transformation( coord_operation.context, coord_operation.projobj ) == 1 cdef PJ_TYPE operation_type = proj_get_type(coord_operation.projobj) coord_operation.type_name = _COORDINATE_OPERATION_TYPE_MAP[operation_type] _clear_proj_error() return coord_operation @staticmethod def from_authority( str auth_name not None, code not None, bint use_proj_alternative_grid_names=False, ): """ Create a CoordinateOperation from an authority code. Parameters ---------- auth_name: str Name of the authority. code: str or int The code used by the authority. use_proj_alternative_grid_names: bool, default=False Use the PROJ alternative grid names. Returns ------- CoordinateOperation """ cdef PJ_CONTEXT* context = pyproj_context_create() cdef PJ* coord_operation_pj = proj_create_from_database( context, cstrencode(auth_name), cstrencode(str(code)), PJ_CATEGORY_COORDINATE_OPERATION, use_proj_alternative_grid_names, NULL, ) if coord_operation_pj == NULL: raise CRSError(f"Invalid authority or code ({auth_name}, {code})") _clear_proj_error() return CoordinateOperation.create(context, coord_operation_pj) @staticmethod def from_epsg(code not None, bint use_proj_alternative_grid_names= False): """ Create a CoordinateOperation from an EPSG code. Parameters ---------- code: str or int The code used by EPSG. use_proj_alternative_grid_names: bool, default=False Use the PROJ alternative grid names. Returns ------- CoordinateOperation """ return CoordinateOperation.from_authority( "EPSG", code, use_proj_alternative_grid_names ) @staticmethod def _from_string(str coordinate_operation_string not None): """ Create a CoordinateOperation from a string. Example: - urn:ogc:def:coordinateOperation:EPSG::1671 Parameters ---------- coordinate_operation_string: str Coordinate operation string. 
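# Illustrative sketch of instantiating a coordinate operation as from_authority/from_epsg
# above do. The import path is assumed; EPSG:1671 is taken from the docstring example and
# may reference a grid-free Helmert transformation, in which case the grid list is empty.
from pyproj.crs import CoordinateOperation  # assumed public re-export location

op = CoordinateOperation.from_epsg(1671)
print(op.method_name, op.accuracy)
print([grid.short_name for grid in op.grids if not grid.available])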
Returns ------- CoordinateOperation """ cdef PJ_CONTEXT* context = pyproj_context_create() cdef PJ* coord_operation_pj = proj_create( context, cstrencode(coordinate_operation_string) ) if ( coord_operation_pj == NULL or proj_get_type(coord_operation_pj) not in ( PJ_TYPE_CONVERSION, PJ_TYPE_TRANSFORMATION, PJ_TYPE_CONCATENATED_OPERATION, PJ_TYPE_OTHER_COORDINATE_OPERATION, ) ): proj_destroy(coord_operation_pj) raise CRSError( "Invalid coordinate operation string: " f"{coordinate_operation_string}" ) _clear_proj_error() return CoordinateOperation.create(context, coord_operation_pj) @staticmethod def from_string(str coordinate_operation_string not None): """ Create a CoordinateOperation from a string. Example: - urn:ogc:def:coordinateOperation:EPSG::1671 - UTM zone 14N - +proj=utm +zone=14 Parameters ---------- coordinate_operation_string: str Coordinate operation string. Returns ------- CoordinateOperation """ try: return CoordinateOperation._from_string(coordinate_operation_string) except CRSError as crs_err: try: return CoordinateOperation.from_name(coordinate_operation_string) except CRSError: raise crs_err @staticmethod def from_json_dict(dict coordinate_operation_dict not None): """ Create CoordinateOperation from a JSON dictionary. .. versionadded:: 2.4.0 Parameters ---------- coordinate_operation_dict: str CoordinateOperation dictionary. Returns ------- CoordinateOperation """ return CoordinateOperation._from_string( json.dumps(coordinate_operation_dict, cls=NumpyEncoder) ) @staticmethod def from_json(str coordinate_operation_json_str not None): """ Create CoordinateOperation from a JSON string. .. versionadded:: 2.4.0 Parameters ---------- coordinate_operation_json_str: str CoordinateOperation JSON string. Returns ------- CoordinateOperation """ return CoordinateOperation.from_json_dict( _load_proj_json(coordinate_operation_json_str )) @staticmethod def from_name( str coordinate_operation_name not None, str auth_name=None, coordinate_operation_type not None=CoordinateOperationType.CONVERSION, ): """ .. versionadded:: 2.5.0 Create a Coordinate Operation from a name. Examples: - UTM zone 14N Parameters ---------- coordinate_operation_name: str Coordinate Operation name. auth_name: str, optional The authority name to refine search (e.g. 'EPSG'). If None, will search all authorities. coordinate_operation_type: CoordinateOperationType, optional The coordinate operation type to create. Default is ``pyproj.crs.enums.CoordinateOperationType.CONVERSION`` Returns ------- CoordinateOperation """ pj_coordinate_operation_type = _PJ_COORDINATE_OPERATION_TYPE_MAP[ CoordinateOperationType.create(coordinate_operation_type) ] cdef PJ_CONTEXT* context = pyproj_context_create() cdef PJ* coordinate_operation_pj = _from_name( context, coordinate_operation_name, auth_name, pj_coordinate_operation_type, ) if coordinate_operation_pj == NULL: raise CRSError( "Invalid coordinate operation name: " f"{coordinate_operation_name}" ) _clear_proj_error() return CoordinateOperation.create(context, coordinate_operation_pj) @property def params(self): """ Returns ------- list[Param]: The coordinate operation parameters. 
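# Illustrative sketch of from_name() and the params property described above; the
# operation name "UTM zone 14N" comes from the docstring example.
from pyproj.crs import CoordinateOperation  # assumed public re-export location

utm14 = CoordinateOperation.from_name("UTM zone 14N")
for param in utm14.params:
    # each Param exposes the parameter name, value, and unit metadata
    print(param.name, param.value, param.unit_name)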
""" if self._params is not None: return self._params self._params = [] cdef int num_params = 0 num_params = proj_coordoperation_get_param_count( self.context, self.projobj ) for param_idx from 0 <= param_idx < num_params: self._params.append( Param.create( self.context, self.projobj, param_idx ) ) _clear_proj_error() return self._params @property def grids(self): """ Returns ------- list[Grid]: The coordinate operation grids. """ if self._grids is not None: return self._grids self._grids = [] cdef int num_grids = 0 num_grids = proj_coordoperation_get_grid_used_count( self.context, self.projobj ) for grid_idx from 0 <= grid_idx < num_grids: self._grids.append( Grid.create( self.context, self.projobj, grid_idx ) ) _clear_proj_error() return self._grids @property def area_of_use(self): """ Returns ------- AreaOfUse: The area of use object with associated attributes. """ if self._area_of_use is not None: return self._area_of_use self._area_of_use = create_area_of_use(self.context, self.projobj) return self._area_of_use def to_proj4(self, version not None=ProjVersion.PROJ_5, bint pretty=False): """ Convert the projection to a PROJ string. .. versionadded:: 3.1.0 pretty Parameters ---------- version: pyproj.enums.ProjVersion, default=pyproj.enums.ProjVersion.PROJ_5 The version of the PROJ string output. pretty: bool, default=False If True, it will set the output to be a multiline string. Returns ------- str: The PROJ string. """ return _to_proj4(self.context, self.projobj, version=version, pretty=pretty) @property def towgs84(self): """ Returns ------- list[float]: A list of 3 or 7 towgs84 values if they exist. """ if self._towgs84 is not None: return self._towgs84 towgs84_dict = OrderedDict( ( ('X-axis translation', None), ('Y-axis translation', None), ('Z-axis translation', None), ('X-axis rotation', None), ('Y-axis rotation', None), ('Z-axis rotation', None), ('Scale difference', None), ) ) for param in self.params: if param.name in towgs84_dict: towgs84_dict[param.name] = param.value self._towgs84 = [val for val in towgs84_dict.values() if val is not None] return self._towgs84 @property def operations(self): """ .. versionadded:: 2.4.0 Returns ------- tuple[CoordinateOperation]: The operations in a concatenated operation. """ if self._operations is not None: return self._operations self._operations = _get_concatenated_operations(self.context, self.projobj) return self._operations def __repr__(self): return ( f"\n" f"Name: {self.name}\n" f"Method: {self.method_name}\n" f"Area of Use:\n{self.area_of_use or '- undefined'}" ) AuthorityMatchInfo = namedtuple( "AuthorityMatchInfo", [ "auth_name", "code", "confidence", ], ) AuthorityMatchInfo.__doc__ = """ .. versionadded:: 3.2.0 CRS Authority Match Information Parameters ---------- auth_name: str Authority name. code: str Object code. confidence: int Confidence that this CRS matches the authority and code. """ cdef dict _CRS_TYPE_MAP = { PJ_TYPE_UNKNOWN: "Unknown CRS", PJ_TYPE_CRS: "CRS", PJ_TYPE_GEODETIC_CRS: "Geodetic CRS", PJ_TYPE_GEOCENTRIC_CRS: "Geocentric CRS", PJ_TYPE_GEOGRAPHIC_CRS: "Geographic CRS", PJ_TYPE_GEOGRAPHIC_2D_CRS: "Geographic 2D CRS", PJ_TYPE_GEOGRAPHIC_3D_CRS: "Geographic 3D CRS", PJ_TYPE_VERTICAL_CRS: "Vertical CRS", PJ_TYPE_PROJECTED_CRS: "Projected CRS", PJ_TYPE_COMPOUND_CRS: "Compound CRS", PJ_TYPE_TEMPORAL_CRS: "Temporal CRS", PJ_TYPE_ENGINEERING_CRS: "Engineering CRS", PJ_TYPE_BOUND_CRS: "Bound CRS", PJ_TYPE_OTHER_CRS: "Other CRS", PJ_TYPE_DERIVED_PROJECTED_CRS: "Derived Projected CRS", } cdef class _CRS(Base): """ .. 
versionadded:: 2.0.0 The cython CRS class to be used as the base for the python CRS class. """ def __cinit__(self): self._context_manager = None self._ellipsoid = None self._area_of_use = None self._prime_meridian = None self._datum = None self._sub_crs_list = None self._source_crs = None self._target_crs = None self._geodetic_crs = None self._coordinate_system = None self._coordinate_operation = None self._type_name = None def __init__(self, str proj_string): self.context = pyproj_context_create() self._context_manager = get_context_manager() # initialize projection self.projobj = proj_create( self.context, cstrencode(proj_string), ) if self.projobj == NULL: raise CRSError(f"Invalid projection: {proj_string}") # make sure the input is a CRS if not proj_is_crs(self.projobj): raise CRSError(f"Input is not a CRS: {proj_string}") # set proj information self.srs = proj_string self._type = proj_get_type(self.projobj) self._set_base_info() _clear_proj_error() @property def type_name(self): """ Returns ------- str: The name of the type of the CRS object. """ if self._type_name is not None: return self._type_name self._type_name = _CRS_TYPE_MAP[self._type] if not self.is_derived or self._type in ( PJ_TYPE_PROJECTED_CRS, PJ_TYPE_DERIVED_PROJECTED_CRS, ): # Projected CRS are derived by definition # https://github.com/OSGeo/PROJ/issues/3525#issuecomment-1365790999 return self._type_name self._type_name = f"Derived {self._type_name}" return self._type_name @property def axis_info(self): """ Retrieves all relevant axis information in the CRS. If it is a Bound CRS, it gets the axis list from the Source CRS. If it is a Compound CRS, it gets the axis list from the Sub CRS list. Returns ------- list[Axis]: The list of axis information. """ axis_info_list = [] if self.coordinate_system: axis_info_list.extend(self.coordinate_system.axis_list) elif self.is_bound and self.source_crs: axis_info_list.extend(self.source_crs.axis_info) else: for sub_crs in self.sub_crs_list: axis_info_list.extend(sub_crs.axis_info) return axis_info_list @property def area_of_use(self): """ Returns ------- AreaOfUse: The area of use object with associated attributes. """ if self._area_of_use is not None: return self._area_of_use self._area_of_use = create_area_of_use(self.context, self.projobj) return self._area_of_use @property def ellipsoid(self): """ .. versionadded:: 2.2.0 Returns ------- Ellipsoid: The ellipsoid object with associated attributes. """ if self._ellipsoid is not None: return None if self._ellipsoid is False else self._ellipsoid cdef PJ_CONTEXT* context = pyproj_context_create() cdef PJ* ellipsoid_pj = proj_get_ellipsoid( context, self.projobj ) _clear_proj_error() if ellipsoid_pj == NULL: self._ellipsoid = False return None self._ellipsoid = Ellipsoid.create(context, ellipsoid_pj) return self._ellipsoid @property def prime_meridian(self): """ .. versionadded:: 2.2.0 Returns ------- PrimeMeridian: The prime meridian object with associated attributes. """ if self._prime_meridian is not None: return None if self._prime_meridian is True else self._prime_meridian cdef PJ_CONTEXT* context = pyproj_context_create() cdef PJ* prime_meridian_pj = proj_get_prime_meridian( context, self.projobj, ) _clear_proj_error() if prime_meridian_pj == NULL: self._prime_meridian = False return None self._prime_meridian = PrimeMeridian.create(context, prime_meridian_pj) return self._prime_meridian @property def datum(self): """ .. 
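# Illustrative sketch of the CRS introspection properties above (type_name, axis_info,
# area_of_use) as seen from a pyproj.CRS instance.
from pyproj import CRS

crs = CRS.from_epsg(4326)
print(crs.type_name)                          # "Geographic 2D CRS"
print([axis.abbrev for axis in crs.axis_info])
print(crs.area_of_use.bounds)                 # (west, south, east, north)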
versionadded:: 2.2.0 Returns ------- Datum """ if self._datum is not None: return None if self._datum is False else self._datum cdef PJ_CONTEXT* context = pyproj_context_create() cdef PJ* datum_pj = proj_crs_get_datum( context, self.projobj, ) if datum_pj == NULL: datum_pj = proj_crs_get_horizontal_datum( context, self.projobj, ) _clear_proj_error() if datum_pj == NULL: self._datum = False return None self._datum = Datum.create(context, datum_pj) return self._datum @property def coordinate_system(self): """ .. versionadded:: 2.2.0 Returns ------- CoordinateSystem """ if self._coordinate_system is not None: return None if self._coordinate_system is False else self._coordinate_system cdef PJ_CONTEXT* context = pyproj_context_create() cdef PJ* coord_system_pj = proj_crs_get_coordinate_system( context, self.projobj ) _clear_proj_error() if coord_system_pj == NULL: self._coordinate_system = False return None self._coordinate_system = CoordinateSystem.create( context, coord_system_pj, ) return self._coordinate_system @property def coordinate_operation(self): """ .. versionadded:: 2.2.0 Returns ------- CoordinateOperation """ if self._coordinate_operation is not None: return ( None if self._coordinate_operation is False else self._coordinate_operation ) if not ( self.is_bound or self.is_derived ): self._coordinate_operation = False return None cdef PJ_CONTEXT* context = pyproj_context_create() cdef PJ* coord_pj = proj_crs_get_coordoperation( context, self.projobj ) _clear_proj_error() if coord_pj == NULL: self._coordinate_operation = False return None self._coordinate_operation = CoordinateOperation.create( context, coord_pj, ) return self._coordinate_operation @property def source_crs(self): """ Returns ------- _CRS: The base CRS of a BoundCRS or a DerivedCRS/ProjectedCRS. """ if self._source_crs is not None: return None if self._source_crs is False else self._source_crs cdef PJ * projobj = proj_get_source_crs(self.context, self.projobj) _clear_proj_error() if projobj == NULL: self._source_crs = False return None try: self._source_crs = _CRS(_to_wkt( self.context, projobj, version=WktVersion.WKT2_2019, pretty=False, )) finally: proj_destroy(projobj) return self._source_crs @property def target_crs(self): """ .. versionadded:: 2.2.0 Returns ------- _CRS: The hub CRS of a BoundCRS. """ if self._target_crs is not None: return None if self._target_crs is False else self._target_crs cdef PJ * projobj = proj_get_target_crs(self.context, self.projobj) _clear_proj_error() if projobj == NULL: self._target_crs = False return None try: self._target_crs = _CRS(_to_wkt( self.context, projobj, version=WktVersion.WKT2_2019, pretty=False, )) finally: proj_destroy(projobj) return self._target_crs @property def sub_crs_list(self): """ If the CRS is a compound CRS, it will return a list of sub CRS objects. Returns ------- list[_CRS] """ if self._sub_crs_list is not None: return self._sub_crs_list if not self.is_compound: self._sub_crs_list = [] return self._sub_crs_list cdef int iii = 0 cdef PJ * projobj = proj_crs_get_sub_crs( self.context, self.projobj, iii, ) self._sub_crs_list = [] while projobj != NULL: try: self._sub_crs_list.append(_CRS(_to_wkt( self.context, projobj, version=WktVersion.WKT2_2019, pretty=False, ))) finally: proj_destroy(projobj) # deallocate temp proj iii += 1 projobj = proj_crs_get_sub_crs( self.context, self.projobj, iii, ) _clear_proj_error() return self._sub_crs_list @property def geodetic_crs(self): """ .. versionadded:: 2.2.0 The geodeticCRS / geographicCRS from the CRS. 
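# Illustrative sketch of unwrapping a bound CRS with the source_crs/geodetic_crs
# properties above; the PROJ string is the one used in the docstrings below.
from pyproj import CRS

bound = CRS("+proj=geocent +datum=WGS84 +towgs84=0,0,0")
print(bound.is_bound)               # True
print(bound.source_crs.type_name)   # the wrapped geocentric CRS
print(bound.geodetic_crs.name)      # its geodetic/geographic base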
Returns ------- _CRS """ if self._geodetic_crs is not None: return self._geodetic_crs if self. _geodetic_crs is not False else None cdef PJ * projobj = proj_crs_get_geodetic_crs(self.context, self.projobj) _clear_proj_error() if projobj == NULL: self._geodetic_crs = False return None try: self._geodetic_crs = _CRS(_to_wkt( self.context, projobj, version=WktVersion.WKT2_2019, pretty=False, )) finally: proj_destroy(projobj) # deallocate temp proj return self._geodetic_crs def to_proj4(self, version=ProjVersion.PROJ_4): """ Convert the projection to a PROJ string. .. warning:: You will likely lose important projection information when converting to a PROJ string from another format. See: https://proj.org/faq.html#what-is-the-best-format-for-describing-coordinate-reference-systems # noqa: E501 Parameters ---------- version: pyproj.enums.ProjVersion, default=pyproj.enums.ProjVersion.PROJ_4 The version of the PROJ string output. Returns ------- str """ warnings.warn( "You will likely lose important projection information when " "converting to a PROJ string from another format. See: " "https://proj.org/faq.html#what-is-the-best-format-for-describing-" "coordinate-reference-systems" ) return _to_proj4(self.context, self.projobj, version=version, pretty=False) def to_epsg(self, int min_confidence=70): """ Return the EPSG code best matching the CRS or None if it a match is not found. Example: >>> from pyproj import CRS >>> ccs = CRS("EPSG:4328") >>> ccs.to_epsg() 4328 If the CRS is bound, you can attempt to get an epsg code from the source CRS: >>> from pyproj import CRS >>> ccs = CRS("+proj=geocent +datum=WGS84 +towgs84=0,0,0") >>> ccs.to_epsg() >>> ccs.source_crs.to_epsg() 4978 >>> ccs == CRS.from_epsg(4978) False Parameters ---------- min_confidence: int, default=70 A value between 0-100 where 100 is the most confident. :ref:`min_confidence` Returns ------- int | None: The best matching EPSG code matching the confidence level. """ auth_info = self.to_authority( auth_name="EPSG", min_confidence=min_confidence ) if auth_info is not None and auth_info[0].upper() == "EPSG": return int(auth_info[1]) return None def to_authority(self, str auth_name=None, int min_confidence=70): """ .. versionadded:: 2.2.0 Return the authority name and code best matching the CRS or None if it a match is not found. Example: >>> from pyproj import CRS >>> ccs = CRS("EPSG:4328") >>> ccs.to_authority() ('EPSG', '4328') If the CRS is bound, you can get an authority from the source CRS: >>> from pyproj import CRS >>> ccs = CRS("+proj=geocent +datum=WGS84 +towgs84=0,0,0") >>> ccs.to_authority() >>> ccs.source_crs.to_authority() ('EPSG', '4978') >>> ccs == CRS.from_authorty('EPSG', '4978') False Parameters ---------- auth_name: str, optional The name of the authority to filter by. min_confidence: int, default=70 A value between 0-100 where 100 is the most confident. :ref:`min_confidence` Returns ------- tuple(str, str) or None: The best matching (, ) for the confidence level. """ try: authority = self.list_authority( auth_name=auth_name, min_confidence=min_confidence, )[0] return authority.auth_name, authority.code except IndexError: return None def list_authority(self, str auth_name=None, int min_confidence=70): """ .. versionadded:: 3.2.0 Return the authority names and codes best matching the CRS. 
Example: >>> from pyproj import CRS >>> ccs = CRS("EPSG:4328") >>> ccs.list_authority() [AuthorityMatchInfo(auth_name='EPSG', code='4326', confidence=100)] If the CRS is bound, you can get an authority from the source CRS: >>> from pyproj import CRS >>> ccs = CRS("+proj=geocent +datum=WGS84 +towgs84=0,0,0") >>> ccs.list_authority() [] >>> ccs.source_crs.list_authority() [AuthorityMatchInfo(auth_name='EPSG', code='4978', confidence=70)] >>> ccs == CRS.from_authorty('EPSG', '4978') False Parameters ---------- auth_name: str, optional The name of the authority to filter by. min_confidence: int, default=70 A value between 0-100 where 100 is the most confident. :ref:`min_confidence` Returns ------- list[AuthorityMatchInfo]: List of authority matches for the CRS. """ # get list of possible matching projections cdef PJ_OBJ_LIST *proj_list = NULL cdef int *c_out_confidence_list = NULL cdef int num_proj_objects = -9999 cdef bytes b_auth_name cdef char *user_auth_name = NULL cdef int iii = 0 if auth_name is not None: b_auth_name = cstrencode(auth_name) user_auth_name = b_auth_name out_confidence_list = [] try: proj_list = proj_identify( self.context, self.projobj, user_auth_name, NULL, &c_out_confidence_list ) if proj_list != NULL: num_proj_objects = proj_list_get_count(proj_list) if c_out_confidence_list != NULL and num_proj_objects > 0: out_confidence_list = [ c_out_confidence_list[iii] for iii in range(num_proj_objects) ] finally: if c_out_confidence_list != NULL: proj_int_list_destroy(c_out_confidence_list) _clear_proj_error() # retrieve the best matching projection cdef PJ* proj = NULL cdef const char* code cdef const char* out_auth_name authority_list = [] try: for iii in range(num_proj_objects): if out_confidence_list[iii] < min_confidence: continue proj = proj_list_get(self.context, proj_list, iii) code = proj_get_id_code(proj, 0) out_auth_name = proj_get_id_auth_name(proj, 0) if out_auth_name != NULL and code != NULL: authority_list.append( AuthorityMatchInfo( out_auth_name, code, out_confidence_list[iii] ) ) # at this point, the auth name is copied and we can release the proj object proj_destroy(proj) proj = NULL finally: # If there was an error we have to call proj_destroy # If there was none, calling it on NULL does nothing proj_destroy(proj) proj_list_destroy(proj_list) _clear_proj_error() return authority_list def to_3d(self, str name=None): """ .. versionadded:: 3.1.0 Convert the current CRS to the 3D version if it makes sense. New vertical axis attributes: - ellipsoidal height - oriented upwards - metre units Parameters ---------- name: str, optional CRS name. If None, it will use the name of the original CRS. Returns ------- _CRS """ cdef char* c_name = NULL cdef bytes b_name if name is not None: b_name = cstrencode(name) c_name = b_name cdef PJ * projobj = proj_crs_promote_to_3D( self.context, c_name, self.projobj ) _clear_proj_error() if projobj == NULL: return self try: crs_3d = _CRS(_to_wkt( self.context, projobj, version=WktVersion.WKT2_2019, pretty=False, )) finally: proj_destroy(projobj) return crs_3d def to_2d(self, str name=None): """ .. versionadded:: 3.6.0 Convert the current CRS to the 2D version if it makes sense. Parameters ---------- name: str, optional CRS name. If None, it will use the name of the original CRS. 
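# Illustrative sketch of promoting/demoting CRS dimensionality with to_3d() above and
# to_2d() below (both assumed available on the Python-level pyproj.CRS in this release).
from pyproj import CRS

crs3d = CRS.from_epsg(4326).to_3d()
print(len(crs3d.axis_info))   # 3 axes: longitude, latitude, ellipsoidal height
crs2d = crs3d.to_2d()
print(len(crs2d.axis_info))   # back to 2 axes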
Returns ------- _CRS """ cdef char* c_name = NULL cdef bytes b_name if name is not None: b_name = cstrencode(name) c_name = b_name cdef PJ * projobj = proj_crs_demote_to_2D( self.context, c_name, self.projobj ) _clear_proj_error() if projobj == NULL: return self try: crs_2d = _CRS(_to_wkt( self.context, projobj, version=WktVersion.WKT2_2019, pretty=False, )) finally: proj_destroy(projobj) return crs_2d def _is_crs_property( self, str property_name, tuple property_types, int sub_crs_index=0 ): """ .. versionadded:: 2.2.0 This method will check for a property on the CRS. It will check if it has the property on the sub CRS if it is a compound CRS and will check if the source CRS has the property if it is a bound CRS. Parameters ---------- property_name: str The name of the CRS property. property_types: tuple(PJ_TYPE) The types to check for for the property. sub_crs_index: int, default=0 THe index of the CRS in the sub CRS list. Returns ------- bool: True if the CRS has this property. """ if self.sub_crs_list: sub_crs = self.sub_crs_list[sub_crs_index] if sub_crs.is_bound: is_property = getattr(sub_crs.source_crs, property_name) else: is_property = getattr(sub_crs, property_name) elif self.is_bound: is_property = getattr(self.source_crs, property_name) else: is_property = self._type in property_types return is_property @property def is_geographic(self): """ This checks if the CRS is geographic. It will check if it has a geographic CRS in the sub CRS if it is a compound CRS and will check if the source CRS is geographic if it is a bound CRS. Returns ------- bool: True if the CRS is in geographic (lon/lat) coordinates. """ return self._is_crs_property( "is_geographic", ( PJ_TYPE_GEOGRAPHIC_CRS, PJ_TYPE_GEOGRAPHIC_2D_CRS, PJ_TYPE_GEOGRAPHIC_3D_CRS ) ) @property def is_projected(self): """ This checks if the CRS is projected. It will check if it has a projected CRS in the sub CRS if it is a compound CRS and will check if the source CRS is projected if it is a bound CRS. Returns ------- bool: True if CRS is projected. """ return self._is_crs_property( "is_projected", (PJ_TYPE_PROJECTED_CRS,) ) @property def is_vertical(self): """ .. versionadded:: 2.2.0 This checks if the CRS is vertical. It will check if it has a vertical CRS in the sub CRS if it is a compound CRS and will check if the source CRS is vertical if it is a bound CRS. Returns ------- bool: True if CRS is vertical. """ return self._is_crs_property( "is_vertical", (PJ_TYPE_VERTICAL_CRS,), sub_crs_index=1 ) @property def is_bound(self): """ Returns ------- bool: True if CRS is bound. """ return self._type == PJ_TYPE_BOUND_CRS @property def is_compound(self): """ .. versionadded:: 3.1.0 Returns ------- bool: True if CRS is compound. """ return self._type == PJ_TYPE_COMPOUND_CRS @property def is_engineering(self): """ .. versionadded:: 2.2.0 Returns ------- bool: True if CRS is local/engineering. """ return self._type == PJ_TYPE_ENGINEERING_CRS @property def is_geocentric(self): """ This checks if the CRS is geocentric and takes into account if the CRS is bound. Returns ------- bool: True if CRS is in geocentric (x/y) coordinates. """ if self.is_bound: return self.source_crs.is_geocentric return self._type == PJ_TYPE_GEOCENTRIC_CRS @property def is_derived(self): """ .. versionadded:: 3.2.0 Returns ------- bool: True if CRS is a Derived CRS. 
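# Illustrative sketch of the boolean CRS-type checks above. EPSG:32614 is assumed to be
# "WGS 84 / UTM zone 14N"; the geocentric PROJ string comes from the docstrings.
from pyproj import CRS

print(CRS.from_epsg(4326).is_geographic)     # True
print(CRS.from_epsg(32614).is_projected)     # True
print(CRS("+proj=geocent +datum=WGS84 +towgs84=0,0,0").is_geocentric)  # True (checks source CRS)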
""" return proj_is_derived_crs(self.context, self.projobj) == 1 def _equals(self, _CRS other, bint ignore_axis_order): if ignore_axis_order: # Only to be used with DerivedCRS/ProjectedCRS/GeographicCRS return proj_is_equivalent_to_with_ctx( self.context, self.projobj, other.projobj, PJ_COMP_EQUIVALENT_EXCEPT_AXIS_ORDER_GEOGCRS, ) == 1 return self._is_equivalent(other) def equals(self, other, ignore_axis_order=False): """ Check if the projection objects are equivalent. Properties ---------- other: CRS Check if the other object ignore_axis_order: bool, default=False If True, it will compare the CRS class and ignore the axis order. Returns ------- bool """ if not isinstance(other, _CRS): return False return self._equals(other, ignore_axis_order=ignore_axis_order) @property def is_deprecated(self): """ .. versionadded:: 3.7.0 Check if the CRS is deprecated Returns ------- bool """ return bool(proj_is_deprecated(self.projobj)) def get_non_deprecated(self): """ .. versionadded:: 3.7.0 Return a list of non-deprecated objects related to this. Returns ------- list[_CRS] """ non_deprecated = [] cdef PJ_OBJ_LIST *proj_list = NULL cdef int num_proj_objects = 0 proj_list = proj_get_non_deprecated( self.context, self.projobj ) if proj_list != NULL: num_proj_objects = proj_list_get_count(proj_list) cdef PJ* proj = NULL try: for iii in range(num_proj_objects): proj = proj_list_get(self.context, proj_list, iii) non_deprecated.append(_CRS(_to_wkt( self.context, proj, version=WktVersion.WKT2_2019, pretty=False, ))) proj_destroy(proj) proj = NULL finally: # If there was an error we have to call proj_destroy # If there was none, calling it on NULL does nothing proj_destroy(proj) proj_list_destroy(proj_list) _clear_proj_error() return non_deprecated pyproj-3.7.1/pyproj/_geod.pxd000066400000000000000000000033471475425760300162060ustar00rootroot00000000000000cdef extern from "geodesic.h" nogil: struct geod_geodesic: pass struct geod_geodesicline: double lat1 double lon1 double azi1 double a double f double salp1 double calp1 double a13 double s13 unsigned caps void geod_init(geod_geodesic* g, double a, double f) void geod_direct( geod_geodesic* g, double lat1, double lon1, double azi1, double s12, double* plat2, double* plon2, double* pazi2) void geod_inverse( geod_geodesic* g, double lat1, double lon1, double lat2, double lon2, double* ps12, double* pazi1, double* pazi2) void geod_lineinit( geod_geodesicline* l, const geod_geodesic* g, double lat1, double lon1, double azi1, unsigned caps) void geod_inverseline( geod_geodesicline* l, const geod_geodesic* g, double lat1, double lon1, double lat2, double lon2, unsigned caps) void geod_position( geod_geodesicline* l, double s12, double* plat2, double* plon2, double* pazi2) void geod_polygonarea( geod_geodesic* g, double lats[], double lons[], int n, double* pA, double* pP) cdef enum: GEODESIC_VERSION_MAJOR GEODESIC_VERSION_MINOR GEODESIC_VERSION_PATCH cdef class Geod: cdef geod_geodesic _geod_geodesic cdef readonly object initstring cdef readonly double a cdef readonly double b cdef readonly double f cdef readonly double es cdef readonly bint sphere pyproj-3.7.1/pyproj/_geod.pyi000066400000000000000000000037661475425760300162210ustar00rootroot00000000000000from typing import Any, NamedTuple geodesic_version_str: str class GeodIntermediateReturn(NamedTuple): npts: int del_s: float dist: float lons: Any lats: Any azis: Any class Geod: initstring: str a: float b: float f: float es: float sphere: bool def __init__( self, a: float, f: float, sphere: bool, b: float, es: 
float ) -> None: ... def __reduce__(self) -> tuple[type[Geod], str]: ... def _fwd( self, lons: Any, lats: Any, az: Any, dist: Any, radians: bool = False, return_back_azimuth: bool = True, ) -> None: ... def _fwd_point( self, lons: float, lats: float, az: float, dist: float, radians: bool = False, return_back_azimuth: bool = True, ) -> tuple[float, float, float]: ... def _inv( self, lons1: Any, lats1: Any, lons2: Any, lats2: Any, radians: bool = False, return_back_azimuth: bool = False, ) -> None: ... def _inv_point( self, lons1: float, lats1: float, lons2: float, lats2: float, radians: bool = False, return_back_azimuth: bool = False, ) -> tuple[float, float, float]: ... def _inv_or_fwd_intermediate( self, lon1: float, lat1: float, lon2_or_azi1: float, lat2: float, npts: int, del_s: float, radians: bool, initial_idx: int, terminus_idx: int, flags: int, out_lons: Any, out_lats: Any, out_azis: Any, return_back_azimuth: bool, is_fwd: bool, ) -> GeodIntermediateReturn: ... def _line_length(self, lons: Any, lats: Any, radians: bool = False) -> float: ... def _polygon_area_perimeter( self, lons: Any, lats: Any, radians: bool = False ) -> tuple[float, float]: ... def reverse_azimuth(azi: Any, radians: bool = False) -> None: ... pyproj-3.7.1/pyproj/_geod.pyx000066400000000000000000000504221475425760300162270ustar00rootroot00000000000000include "base.pxi" cimport cython from libc.math cimport ceil, isnan, round from pyproj._compat cimport cstrencode, empty_array import math from collections import namedtuple from pyproj.enums import GeodIntermediateFlag from pyproj.exceptions import GeodError geodesic_version_str = ( f"{GEODESIC_VERSION_MAJOR}.{GEODESIC_VERSION_MINOR}.{GEODESIC_VERSION_PATCH}" ) GeodIntermediateReturn = namedtuple( "GeodIntermediateReturn", ["npts", "del_s", "dist", "lons", "lats", "azis"] ) GeodIntermediateReturn.__doc__ = """ .. 
versionadded:: 3.1.0 Geod Intermediate Return value (Named Tuple) Parameters ---------- npts: int number of points del_s: float delimiter distance between two successive points dist: float distance between the initial and terminus points out_lons: Any array of the output lons out_lats: Any array of the output lats out_azis: Any array of the output azis """ cdef: int GEOD_INTER_FLAG_DEFAULT = GeodIntermediateFlag.DEFAULT int GEOD_INTER_FLAG_NPTS_MASK = ( GeodIntermediateFlag.NPTS_ROUND | GeodIntermediateFlag.NPTS_CEIL | GeodIntermediateFlag.NPTS_TRUNC ) int GEOD_INTER_FLAG_NPTS_ROUND = GeodIntermediateFlag.NPTS_ROUND int GEOD_INTER_FLAG_NPTS_CEIL = GeodIntermediateFlag.NPTS_CEIL int GEOD_INTER_FLAG_NPTS_TRUNC = GeodIntermediateFlag.NPTS_TRUNC int GEOD_INTER_FLAG_DEL_S_MASK = ( GeodIntermediateFlag.DEL_S_RECALC | GeodIntermediateFlag.DEL_S_NO_RECALC ) int GEOD_INTER_FLAG_DEL_S_RECALC = GeodIntermediateFlag.DEL_S_RECALC int GEOD_INTER_FLAG_DEL_S_NO_RECALC = GeodIntermediateFlag.DEL_S_NO_RECALC int GEOD_INTER_FLAG_AZIS_MASK = ( GeodIntermediateFlag.AZIS_DISCARD | GeodIntermediateFlag.AZIS_KEEP ) int GEOD_INTER_FLAG_AZIS_DISCARD = GeodIntermediateFlag.AZIS_DISCARD int GEOD_INTER_FLAG_AZIS_KEEP = GeodIntermediateFlag.AZIS_KEEP cdef double _reverse_azimuth(double azi, double factor) nogil: if azi > 0: azi = azi - factor else: azi = azi + factor return azi def reverse_azimuth(object azi, bint radians=False): cdef PyBuffWriteManager azibuff = PyBuffWriteManager(azi) cdef Py_ssize_t iii cdef double factor = 180 if radians: factor = math.pi with nogil: for iii in range(azibuff.len): azibuff.data[iii] = _reverse_azimuth(azibuff.data[iii], factor=factor) cdef class Geod: def __init__(self, double a, double f, bint sphere, double b, double es): geod_init(&self._geod_geodesic, a, f) self.a = a self.f = f self.initstring = f"+{a=} +{f=}" self.sphere = sphere self.b = b self.es = es def __reduce__(self): """special method that allows pyproj.Geod instance to be pickled""" return self.__class__, (self.initstring,) @cython.boundscheck(False) @cython.wraparound(False) def _fwd( self, object lons, object lats, object az, object dist, bint radians=False, bint return_back_azimuth=True, ): """ forward transformation - determine longitude, latitude and back azimuth of a terminus point given an initial point longitude and latitude, plus forward azimuth and distance. if radians=True, lons/lats are radians instead of degrees. if return_back_azimuth=True, the return azimuth will be the forward azimuth instead of the forward azimuth. """ cdef: PyBuffWriteManager lonbuff = PyBuffWriteManager(lons) PyBuffWriteManager latbuff = PyBuffWriteManager(lats) PyBuffWriteManager azbuff = PyBuffWriteManager(az) PyBuffWriteManager distbuff = PyBuffWriteManager(dist) # process data in buffer if not lonbuff.len == latbuff.len == azbuff.len == distbuff.len: raise GeodError("Array lengths are not the same.") cdef: double lat1 double lon1 double az1 double s12 double plon2 double plat2 double pazi2 Py_ssize_t iii with nogil: for iii in range(lonbuff.len): if not radians: lon1 = lonbuff.data[iii] lat1 = latbuff.data[iii] az1 = azbuff.data[iii] s12 = distbuff.data[iii] else: lon1 = _RAD2DG * lonbuff.data[iii] lat1 = _RAD2DG * latbuff.data[iii] az1 = _RAD2DG * azbuff.data[iii] s12 = distbuff.data[iii] geod_direct( &self._geod_geodesic, lat1, lon1, az1, s12, &plat2, &plon2, &pazi2, ) # by default (return_back_azimuth=True), # forward azimuth needs to be flipped 180 degrees # to match the (back azimuth) output of PROJ geod utilities. 
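# _reverse_azimuth shifts the azimuth by 180 degrees (subtracting when the
# value is positive, adding when it is not) so the result stays within the
# [-180, 180] range and points back along the geodesic toward the start.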
if return_back_azimuth: pazi2 = _reverse_azimuth(pazi2, factor=180) if not radians: lonbuff.data[iii] = plon2 latbuff.data[iii] = plat2 azbuff.data[iii] = pazi2 else: lonbuff.data[iii] = _DG2RAD * plon2 latbuff.data[iii] = _DG2RAD * plat2 azbuff.data[iii] = _DG2RAD * pazi2 @cython.boundscheck(False) @cython.wraparound(False) def _fwd_point( self, object lon1in, object lat1in, object az1in, object s12in, bint radians=False, bint return_back_azimuth=True, ): """ Scalar optimized function forward transformation - determine longitude, latitude and back azimuth of a terminus point given an initial point longitude and latitude, plus forward azimuth and distance. if radians=True, lons/lats are radians instead of degrees. """ cdef: double plon2 double plat2 double pazi2 double lon1 = lon1in double lat1 = lat1in double az1 = az1in double s12 = s12in # We do the type-checking internally here due to automatically # casting length-1 arrays to float that we don't want to return scalar for. # Ex: float(np.array([0])) works and we don't want to accept numpy arrays for x_in in (lon1in, lat1in, az1in, s12in): if not isinstance(x_in, (float, int)): raise TypeError("Scalar input is required for point based functions") with nogil: if radians: lon1 = _RAD2DG * lon1 lat1 = _RAD2DG * lat1 az1 = _RAD2DG * az1 geod_direct( &self._geod_geodesic, lat1, lon1, az1, s12, &plat2, &plon2, &pazi2, ) # back azimuth needs to be flipped 180 degrees # to match what PROJ geod utility produces. if return_back_azimuth: pazi2 =_reverse_azimuth(pazi2, factor=180) if radians: plon2 = _DG2RAD * plon2 plat2 = _DG2RAD * plat2 pazi2 = _DG2RAD * pazi2 return plon2, plat2, pazi2 @cython.boundscheck(False) @cython.wraparound(False) def _inv( self, object lons1, object lats1, object lons2, object lats2, bint radians=False, bint return_back_azimuth=True, ): """ inverse transformation - return forward azimuth (azi12) and back azimuths (azi21), plus distance between an initial and terminus lat/lon pair. if radians=True, lons/lats are radians instead of degree if return_back_azimuth=True, azi21 is a back azimuth (180 degrees flipped), otherwise azi21 is also a forward azimuth. """ cdef: PyBuffWriteManager lon1buff = PyBuffWriteManager(lons1) PyBuffWriteManager lat1buff = PyBuffWriteManager(lats1) PyBuffWriteManager lon2buff = PyBuffWriteManager(lons2) PyBuffWriteManager lat2buff = PyBuffWriteManager(lats2) # process data in buffer if not lon1buff.len == lat1buff.len == lon2buff.len == lat2buff.len: raise GeodError("Array lengths are not the same.") cdef: double lat1 double lon1 double lat2 double lon2 double pazi1 double pazi2 double ps12 Py_ssize_t iii with nogil: for iii in range(lon1buff.len): if radians: lon1 = _RAD2DG * lon1buff.data[iii] lat1 = _RAD2DG * lat1buff.data[iii] lon2 = _RAD2DG * lon2buff.data[iii] lat2 = _RAD2DG * lat2buff.data[iii] else: lon1 = lon1buff.data[iii] lat1 = lat1buff.data[iii] lon2 = lon2buff.data[iii] lat2 = lat2buff.data[iii] geod_inverse( &self._geod_geodesic, lat1, lon1, lat2, lon2, &ps12, &pazi1, &pazi2, ) # by default (return_back_azimuth=True), # forward azimuth needs to be flipped 180 degrees # to match the (back azimuth) output of PROJ geod utilities. 
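# geod_inverse returns pazi2 as the forward azimuth at the terminus point;
# flipping it by 180 degrees converts it into the back azimuth expected by
# callers that emulate the output of the PROJ geod utility.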
if return_back_azimuth: pazi2 = _reverse_azimuth(pazi2, factor=180) if radians: lon1buff.data[iii] = _DG2RAD * pazi1 lat1buff.data[iii] = _DG2RAD * pazi2 else: lon1buff.data[iii] = pazi1 lat1buff.data[iii] = pazi2 # write azimuth data into lon2 buffer lon2buff.data[iii] = ps12 @cython.boundscheck(False) @cython.wraparound(False) def _inv_point( self, object lon1in, object lat1in, object lon2in, object lat2in, bint radians=False, bint return_back_azimuth=True, ): """ Scalar optimized function inverse transformation - return forward and back azimuth, plus distance between an initial and terminus lat/lon pair. if radians=True, lons/lats are radians instead of degree """ cdef: double pazi1 double pazi2 double ps12 double lon1 = lon1in double lat1 = lat1in double lon2 = lon2in double lat2 = lat2in # We do the type-checking internally here due to automatically # casting length-1 arrays to float that we don't want to return scalar for. # Ex: float(np.array([0])) works and we don't want to accept numpy arrays for x_in in (lon1in, lat1in, lon2in, lat2in): if not isinstance(x_in, (float, int)): raise TypeError("Scalar input is required for point based functions") with nogil: if radians: lon1 = _RAD2DG * lon1 lat1 = _RAD2DG * lat1 lon2 = _RAD2DG * lon2 lat2 = _RAD2DG * lat2 geod_inverse( &self._geod_geodesic, lat1, lon1, lat2, lon2, &ps12, &pazi1, &pazi2, ) # back azimuth needs to be flipped 180 degrees # to match what proj4 geod utility produces. if return_back_azimuth: pazi2 =_reverse_azimuth(pazi2, factor=180) if radians: pazi1 = _DG2RAD * pazi1 pazi2 = _DG2RAD * pazi2 return pazi1, pazi2, ps12 @cython.boundscheck(False) @cython.wraparound(False) def _inv_or_fwd_intermediate( self, double lon1, double lat1, double lon2_or_azi1, double lat2, int npts, double del_s, bint radians, int initial_idx, int terminus_idx, int flags, object out_lons, object out_lats, object out_azis, bint return_back_azimuth, bint is_fwd, ) -> GeodIntermediateReturn: """ .. versionadded:: 3.1.0 given initial and terminus lat/lon, find npts intermediate points. using given lons, lats buffers """ cdef: Py_ssize_t iii double pazi2 double s12 double plon2 double plat2 geod_geodesicline line bint store_az = ( out_azis is not None or (flags & GEOD_INTER_FLAG_AZIS_MASK) == GEOD_INTER_FLAG_AZIS_KEEP ) PyBuffWriteManager lons_buff PyBuffWriteManager lats_buff PyBuffWriteManager azis_buff if not is_fwd and (del_s == 0) == (npts == 0): raise GeodError("inv_intermediate: " "npts and del_s are mutually exclusive, " "only one of them must be != 0.") with nogil: if radians: lon1 *= _RAD2DG lat1 *= _RAD2DG lon2_or_azi1 *= _RAD2DG if not is_fwd: lat2 *= _RAD2DG if is_fwd: # do fwd computation to set azimuths, distance. geod_lineinit(&line, &self._geod_geodesic, lat1, lon1, lon2_or_azi1, 0u) line.s13 = del_s * (npts + initial_idx + terminus_idx - 1) else: # do inverse computation to set azimuths, distance. 
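# geod_inverseline solves the inverse problem between (lat1, lon1) and
# (lat2, lon2_or_azi1) and stores the resulting geodesic in `line`, with
# line.s13 holding the total distance used below to derive npts / del_s.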
geod_inverseline(&line, &self._geod_geodesic, lat1, lon1, lat2, lon2_or_azi1, 0u) if npts == 0: # calc the number of required points by the distance increment # s12 holds a temporary float value of npts (just reusing this var) s12 = line.s13 / del_s - initial_idx - terminus_idx + 1 if (flags & GEOD_INTER_FLAG_NPTS_MASK) == \ GEOD_INTER_FLAG_NPTS_ROUND: s12 = round(s12) elif (flags & GEOD_INTER_FLAG_NPTS_MASK) == \ GEOD_INTER_FLAG_NPTS_CEIL: s12 = ceil(s12) npts = int(s12) if (flags & GEOD_INTER_FLAG_DEL_S_MASK) == GEOD_INTER_FLAG_DEL_S_RECALC: # calc the distance increment by the number of required points del_s = line.s13 / (npts + initial_idx + terminus_idx - 1) with gil: if out_lons is None: out_lons = empty_array(npts) if out_lats is None: out_lats = empty_array(npts) if out_azis is None and store_az: out_azis = empty_array(npts) lons_buff = PyBuffWriteManager(out_lons) lats_buff = PyBuffWriteManager(out_lats) if store_az: azis_buff = PyBuffWriteManager(out_azis) if lons_buff.len < npts \ or lats_buff.len < npts \ or (store_az and azis_buff.len < npts): raise GeodError( "Arrays are not long enough (" f"{lons_buff.len}, {lats_buff.len}, " f"{azis_buff.len if store_az else -1}) < {npts}.") # loop over intermediate points, compute lat/lons. for iii in range(0, npts): s12 = (iii + initial_idx) * del_s geod_position(&line, s12, &plat2, &plon2, &pazi2) if radians: plat2 *= _DG2RAD plon2 *= _DG2RAD lats_buff.data[iii] = plat2 lons_buff.data[iii] = plon2 if store_az: # by default (return_back_azimuth=True), # forward azimuth needs to be flipped 180 degrees # to match the (back azimuth) output of PROJ geod utilities. if return_back_azimuth: pazi2 =_reverse_azimuth(pazi2, factor=180) azis_buff.data[iii] = pazi2 return GeodIntermediateReturn( npts, del_s, line.s13, out_lons, out_lats, out_azis) @cython.boundscheck(False) @cython.wraparound(False) def _line_length(self, object lons, object lats, bint radians=False): """ Calculate the distance between points along a line. Parameters ---------- lons: array The longitude points along a line. lats: array The latitude points along a line. radians: bool, default=False If True, the input data is assumed to be in radians. Returns ------- float: The total distance. """ cdef PyBuffWriteManager lonbuff = PyBuffWriteManager(lons) cdef PyBuffWriteManager latbuff = PyBuffWriteManager(lats) # process data in buffer if lonbuff.len != latbuff.len: raise GeodError("Array lengths are not the same.") if lonbuff.len == 1: lonbuff.data[0] = 0 return 0.0 cdef: double lat1 double lon1 double lat2 double lon2 double pazi1 double pazi2 double ps12 double total_distance = 0.0 Py_ssize_t iii with nogil: for iii in range(lonbuff.len - 1): if radians: lon1 = _RAD2DG * lonbuff.data[iii] lat1 = _RAD2DG * latbuff.data[iii] lon2 = _RAD2DG * lonbuff.data[iii + 1] lat2 = _RAD2DG * latbuff.data[iii + 1] else: lon1 = lonbuff.data[iii] lat1 = latbuff.data[iii] lon2 = lonbuff.data[iii + 1] lat2 = latbuff.data[iii + 1] geod_inverse( &self._geod_geodesic, lat1, lon1, lat2, lon2, &ps12, &pazi1, &pazi2, ) lonbuff.data[iii] = ps12 total_distance += ps12 return total_distance @cython.boundscheck(False) @cython.wraparound(False) def _polygon_area_perimeter(self, object lons, object lats, bint radians=False): """ A simple interface for computing the area of a geodesic polygon. lats should be in the range [-90 deg, 90 deg]. Only simple polygons (which are not self-intersecting) are allowed. There's no need to "close" the polygon by repeating the first vertex. 
The area returned is signed with counter-clockwise traversal being treated as positive. Parameters ---------- lons: array An array of longitude values. lats: array An array of latitude values. radians: bool, default=False If True, the input data is assumed to be in radians. Returns ------- (float, float): The area (meter^2) and perimeter (meters) of the polygon. """ cdef PyBuffWriteManager lonbuff = PyBuffWriteManager(lons) cdef PyBuffWriteManager latbuff = PyBuffWriteManager(lats) # process data in buffer if not lonbuff.len == latbuff.len: raise GeodError("Array lengths are not the same.") cdef double polygon_area cdef double polygon_perimeter cdef Py_ssize_t iii with nogil: if radians: for iii in range(lonbuff.len): lonbuff.data[iii] *= _RAD2DG latbuff.data[iii] *= _RAD2DG geod_polygonarea( &self._geod_geodesic, latbuff.data, lonbuff.data, lonbuff.len, &polygon_area, &polygon_perimeter ) return (polygon_area, polygon_perimeter) def __repr__(self): return f"{self.__class__.__name__}({self.initstring!r})" pyproj-3.7.1/pyproj/_network.pyi000066400000000000000000000001471475425760300167620ustar00rootroot00000000000000def set_network_enabled(active: bool | None = None) -> None: ... def is_network_enabled() -> bool: ... pyproj-3.7.1/pyproj/_network.pyx000066400000000000000000000026441475425760300170050ustar00rootroot00000000000000include "proj.pxi" import os from pyproj.utils import strtobool from pyproj._compat cimport cstrencode from pyproj._context cimport pyproj_context_create from pyproj._context import _set_context_network_enabled def set_network_enabled(active=None): """ .. versionadded:: 3.0.0 Set whether PROJ network is enabled by default. This has the same behavior as the `PROJ_NETWORK` environment variable. See: :c:func:`proj_context_set_enable_network` Parameters ---------- active: bool, optional Default is None, which uses the system defaults for networking. If True, it will force the use of network for grids regardless of any other network setting. If False, it will force disable use of network for grids regardless of any other network setting. """ if active is None: # in the case of the global context, need to reset network # setting based on the environment variable every time if None # because it could have been changed by the user previously active = strtobool(os.environ.get("PROJ_NETWORK", "OFF")) _set_context_network_enabled(bool(active)) def is_network_enabled(): """ .. versionadded:: 3.0.0 See: :c:func:`proj_context_is_network_enabled` Returns ------- bool: If PROJ network is enabled by default. 
""" return proj_context_is_network_enabled(pyproj_context_create()) == 1 pyproj-3.7.1/pyproj/_show_versions.py000066400000000000000000000057251475425760300200370ustar00rootroot00000000000000""" Utility methods to print system info for debugging adapted from :func:`sklearn.utils._show_versions` which was adapted from :func:`pandas.show_versions` """ import importlib.metadata import platform import sys def _get_sys_info(): """System information Return ------ sys_info : dict system and Python version information """ blob = [ ("python", sys.version.replace("\n", " ")), ("executable", sys.executable), ("machine", platform.platform()), ] return dict(blob) def _get_proj_info(): """Information on system PROJ Returns ------- proj_info: dict system PROJ information """ # pylint: disable=import-outside-toplevel import pyproj from pyproj.database import get_database_metadata from pyproj.exceptions import DataDirError try: data_dir = pyproj.datadir.get_data_dir() except DataDirError: data_dir = None blob = [ ("pyproj", pyproj.__version__), ("PROJ (runtime)", pyproj.__proj_version__), ("PROJ (compiled)", pyproj.__proj_compiled_version__), ("data dir", data_dir), ("user_data_dir", pyproj.datadir.get_user_data_dir()), ("PROJ DATA (recommended version)", get_database_metadata("PROJ_DATA.VERSION")), ( "PROJ Database", f"{get_database_metadata('DATABASE.LAYOUT.VERSION.MAJOR')}." f"{get_database_metadata('DATABASE.LAYOUT.VERSION.MINOR')}", ), ( "EPSG Database", f"{get_database_metadata('EPSG.VERSION')} " f"[{get_database_metadata('EPSG.DATE')}]", ), ( "ESRI Database", f"{get_database_metadata('ESRI.VERSION')} " f"[{get_database_metadata('ESRI.DATE')}]", ), ( "IGNF Database", f"{get_database_metadata('IGNF.VERSION')} " f"[{get_database_metadata('IGNF.DATE')}]", ), ] return dict(blob) def _get_deps_info(): """Overview of the installed version of main dependencies Returns ------- deps_info: dict version information on relevant Python libraries """ deps = ["certifi", "Cython", "setuptools", "pip"] def get_version(module): try: return importlib.metadata.version(module) except importlib.metadata.PackageNotFoundError: return None return {dep: get_version(dep) for dep in deps} def _print_info_dict(info_dict): """Print the information dictionary""" for key, stat in info_dict.items(): print(f"{key:>10}: {stat}") def show_versions(): """ .. versionadded:: 2.2.1 Print useful debugging information Example ------- > python -c "import pyproj; pyproj.show_versions()" """ print("pyproj info:") _print_info_dict(_get_proj_info()) print("\nSystem:") _print_info_dict(_get_sys_info()) print("\nPython deps:") _print_info_dict(_get_deps_info()) pyproj-3.7.1/pyproj/_sync.pyi000066400000000000000000000000441475425760300162410ustar00rootroot00000000000000def get_proj_endpoint() -> str: ... pyproj-3.7.1/pyproj/_sync.pyx000066400000000000000000000004251475425760300162630ustar00rootroot00000000000000include "proj.pxi" from pyproj._context cimport pyproj_context_create def get_proj_endpoint() -> str: """ Returns ------- str: URL of the endpoint where PROJ grids are stored. 
""" return proj_context_get_url_endpoint(pyproj_context_create()) pyproj-3.7.1/pyproj/_transformer.pxd000066400000000000000000000012131475425760300176200ustar00rootroot00000000000000include "proj.pxi" from pyproj._crs cimport _CRS, Base cdef class _TransformerGroup: cdef PJ_CONTEXT* context cdef readonly object _context_manager cdef readonly list _transformers cdef readonly list _unavailable_operations cdef readonly list _best_available cdef class _Transformer(Base): cdef PJ_PROJ_INFO proj_info cdef readonly _area_of_use cdef readonly str type_name cdef readonly tuple _operations cdef readonly _CRS _source_crs cdef readonly _CRS _target_crs @staticmethod cdef _Transformer _from_pj( PJ_CONTEXT* context, PJ *transform_pj, bint always_xy, ) pyproj-3.7.1/pyproj/_transformer.pyi000066400000000000000000000071111475425760300176310ustar00rootroot00000000000000import numbers from array import array from typing import Any, NamedTuple from pyproj._crs import _CRS, AreaOfUse, Base, CoordinateOperation from pyproj.enums import ProjVersion, TransformDirection class AreaOfInterest(NamedTuple): west_lon_degree: float south_lat_degree: float east_lon_degree: float north_lat_degree: float class Factors(NamedTuple): meridional_scale: float parallel_scale: float areal_scale: float angular_distortion: float meridian_parallel_angle: float meridian_convergence: float tissot_semimajor: float tissot_semiminor: float dx_dlam: float dx_dphi: float dy_dlam: float dy_dphi: float class _TransformerGroup: _transformers: Any _unavailable_operations: list[CoordinateOperation] _best_available: bool def __init__( self, crs_from: str, crs_to: str, always_xy: bool, area_of_interest: AreaOfInterest | None, authority: str | None, accuracy: float | None, allow_ballpark: bool, allow_superseded: bool, ) -> None: ... class _Transformer(Base): input_geographic: bool output_geographic: bool is_pipeline: bool type_name: str @property def id(self) -> str: ... @property def description(self) -> str: ... @property def definition(self) -> str: ... @property def has_inverse(self) -> bool: ... @property def accuracy(self) -> float: ... @property def area_of_use(self) -> AreaOfUse: ... @property def source_crs(self) -> _CRS | None: ... @property def target_crs(self) -> _CRS | None: ... @property def operations(self) -> tuple[CoordinateOperation] | None: ... def get_last_used_operation(self) -> _Transformer: ... @property def is_network_enabled(self) -> bool: ... def to_proj4( self, version: ProjVersion | str = ProjVersion.PROJ_5, pretty: bool = False, ) -> str: ... @staticmethod def from_crs( crs_from: bytes, crs_to: bytes, always_xy: bool = False, area_of_interest: AreaOfInterest | None = None, authority: str | None = None, accuracy: str | None = None, allow_ballpark: bool | None = None, force_over: bool = False, only_best: bool | None = None, ) -> _Transformer: ... @staticmethod def from_pipeline(proj_pipeline: bytes) -> _Transformer: ... def _transform( self, inx: Any, iny: Any, inz: Any, intime: Any, direction: TransformDirection | str, radians: bool, errcheck: bool, ) -> None: ... def _transform_point( self, inx: numbers.Real, iny: numbers.Real, inz: numbers.Real, intime: numbers.Real, direction: TransformDirection | str, radians: bool, errcheck: bool, ) -> None: ... def _transform_sequence( self, stride: int, inseq: array[float], switch: bool, direction: TransformDirection | str, time_3rd: bool, radians: bool, errcheck: bool, ) -> None: ... 
def _transform_bounds( self, left: float, bottom: float, right: float, top: float, densify_pts: int = 21, radians: bool = False, errcheck: bool = False, direction: TransformDirection | str = TransformDirection.FORWARD, ) -> tuple[float, float, float, float]: ... def _get_factors( self, longitude: Any, latitude: Any, radians: bool, errcheck: bool ) -> Factors: ... pyproj-3.7.1/pyproj/_transformer.pyx000066400000000000000000001146731475425760300176640ustar00rootroot00000000000000include "base.pxi" cimport cython from cpython.mem cimport PyMem_Free, PyMem_Malloc import copy import re import warnings from collections import namedtuple from pyproj._compat cimport cstrencode from pyproj._context cimport _clear_proj_error, _get_proj_error, pyproj_context_create from pyproj._crs cimport ( _CRS, Base, CoordinateOperation, _get_concatenated_operations, _to_proj4, _to_wkt, create_area_of_use, ) from pyproj._context import _LOGGER, get_context_manager from pyproj.aoi import AreaOfInterest from pyproj.enums import ProjVersion, TransformDirection, WktVersion from pyproj.exceptions import ProjError _AUTH_CODE_RE = re.compile(r"(?P\w+)\:(?P\w+)") cdef dict _TRANSFORMER_TYPE_MAP = { PJ_TYPE_UNKNOWN: "Unknown Transformer", PJ_TYPE_CONVERSION: "Conversion Transformer", PJ_TYPE_TRANSFORMATION: "Transformation Transformer", PJ_TYPE_CONCATENATED_OPERATION: "Concatenated Operation Transformer", PJ_TYPE_OTHER_COORDINATE_OPERATION: "Other Coordinate Operation Transformer", } Factors = namedtuple( "Factors", [ "meridional_scale", "parallel_scale", "areal_scale", "angular_distortion", "meridian_parallel_angle", "meridian_convergence", "tissot_semimajor", "tissot_semiminor", "dx_dlam", "dx_dphi", "dy_dlam", "dy_dphi", ], ) Factors.__doc__ = """ .. versionadded:: 2.6.0 These are the scaling and angular distortion factors. See PROJ :c:type:`PJ_FACTORS` documentation. Parameters ---------- meridional_scale: list[float] Meridional scale at coordinate. parallel_scale: list[float] Parallel scale at coordinate. areal_scale: list[float] Areal scale factor at coordinate. angular_distortion: list[float] Angular distortion at coordinate. meridian_parallel_angle: list[float] Meridian/parallel angle at coordinate. meridian_convergence: list[float] Meridian convergence at coordinate. Sometimes also described as *grid declination*. tissot_semimajor: list[float] Maximum scale factor. tissot_semiminor: list[float] Minimum scale factor. dx_dlam: list[float] Partial derivative of coordinate. dx_dphi: list[float] Partial derivative of coordinate. dy_dlam: list[float] Partial derivative of coordinate. dy_dphi: list[float] Partial derivative of coordinate. 
""" cdef PJ_DIRECTION get_pj_direction(object direction) except *: # optimized lookup to avoid creating a new instance every time # gh-1205 if not isinstance(direction, TransformDirection): direction = TransformDirection.create(direction) # to avoid __hash__ calls from a dictionary lookup, # we can inline the small number of options for performance if direction is TransformDirection.FORWARD: return PJ_FWD if direction is TransformDirection.INVERSE: return PJ_INV if direction is TransformDirection.IDENT: return PJ_IDENT raise KeyError(f"{direction} is not a valid TransformDirection") cdef class _TransformerGroup: def __cinit__(self): self.context = NULL self._context_manager = None self._transformers = [] self._unavailable_operations = [] self._best_available = True def __init__( self, _CRS crs_from not None, _CRS crs_to not None, bint always_xy, area_of_interest, bint allow_ballpark, str authority, double accuracy, bint allow_superseded, ): """ From PROJ docs: The operations are sorted with the most relevant ones first: by descending area (intersection of the transformation area with the area of interest, or intersection of the transformation with the area of use of the CRS), and by increasing accuracy. Operations with unknown accuracy are sorted last, whatever their area. """ self.context = pyproj_context_create() self._context_manager = get_context_manager() cdef: PJ_OPERATION_FACTORY_CONTEXT* operation_factory_context = NULL PJ_OBJ_LIST * pj_operations = NULL PJ* pj_transform = NULL const char* c_authority = NULL int num_operations = 0 int is_instantiable = 0 double west_lon_degree double south_lat_degree double east_lon_degree double north_lat_degree if authority is not None: tmp = cstrencode(authority) c_authority = tmp try: operation_factory_context = proj_create_operation_factory_context( self.context, c_authority, ) if area_of_interest is not None: if not isinstance(area_of_interest, AreaOfInterest): raise ProjError( "Area of interest must be of the type " "pyproj.transformer.AreaOfInterest." 
) west_lon_degree = area_of_interest.west_lon_degree south_lat_degree = area_of_interest.south_lat_degree east_lon_degree = area_of_interest.east_lon_degree north_lat_degree = area_of_interest.north_lat_degree proj_operation_factory_context_set_area_of_interest( self.context, operation_factory_context, west_lon_degree, south_lat_degree, east_lon_degree, north_lat_degree, ) if accuracy > 0: proj_operation_factory_context_set_desired_accuracy( self.context, operation_factory_context, accuracy, ) proj_operation_factory_context_set_allow_ballpark_transformations( self.context, operation_factory_context, allow_ballpark, ) proj_operation_factory_context_set_discard_superseded( self.context, operation_factory_context, not allow_superseded, ) proj_operation_factory_context_set_grid_availability_use( self.context, operation_factory_context, PROJ_GRID_AVAILABILITY_IGNORED, ) proj_operation_factory_context_set_spatial_criterion( self.context, operation_factory_context, PROJ_SPATIAL_CRITERION_PARTIAL_INTERSECTION ) pj_operations = proj_create_operations( self.context, crs_from.projobj, crs_to.projobj, operation_factory_context, ) num_operations = proj_list_get_count(pj_operations) for iii in range(num_operations): pj_transform = proj_list_get( self.context, pj_operations, iii, ) is_instantiable = proj_coordoperation_is_instantiable( self.context, pj_transform, ) if is_instantiable: self._transformers.append( _Transformer._from_pj( self.context, pj_transform, always_xy, ) ) else: coordinate_operation = CoordinateOperation.create( self.context, pj_transform, ) self._unavailable_operations.append(coordinate_operation) if iii == 0: self._best_available = False warnings.warn( "Best transformation is not available due to missing " f"{coordinate_operation.grids[0]!r}" ) finally: if operation_factory_context != NULL: proj_operation_factory_context_destroy(operation_factory_context) if pj_operations != NULL: proj_list_destroy(pj_operations) _clear_proj_error() cdef PJ* proj_create_crs_to_crs( PJ_CONTEXT *ctx, const char *source_crs_str, const char *target_crs_str, PJ_AREA *area, str authority, str accuracy, allow_ballpark, bint force_over, only_best, ) except NULL: """ This is the same as proj_create_crs_to_crs in proj.h with the options added. It is a hack for stabilily reasons. 
Reference: https://github.com/pyproj4/pyproj/pull/800 """ cdef PJ *source_crs = proj_create(ctx, source_crs_str) if source_crs == NULL: _LOGGER.debug( "PROJ_DEBUG: proj_create_crs_to_crs: Cannot instantiate source_crs" ) return NULL cdef PJ *target_crs = proj_create(ctx, target_crs_str) if target_crs == NULL: proj_destroy(source_crs) _LOGGER.debug( "PROJ_DEBUG: proj_create_crs_to_crs: Cannot instantiate target_crs" ) return NULL cdef: const char* options[6] bytes b_authority bytes b_accuracy int options_index = 0 int options_init_iii = 0 for options_init_iii in range(6): options[options_init_iii] = NULL if authority is not None: b_authority = cstrencode(f"AUTHORITY={authority}") options[options_index] = b_authority options_index += 1 if accuracy is not None: b_accuracy = cstrencode(f"ACCURACY={accuracy}") options[options_index] = b_accuracy options_index += 1 if allow_ballpark is not None: if not allow_ballpark: options[options_index] = b"ALLOW_BALLPARK=NO" options_index += 1 if force_over: options[options_index] = b"FORCE_OVER=YES" options_index += 1 if only_best is not None: if only_best: options[options_index] = b"ONLY_BEST=YES" else: options[options_index] = b"ONLY_BEST=NO" cdef PJ* transform = NULL with nogil: transform = proj_create_crs_to_crs_from_pj( ctx, source_crs, target_crs, area, options, ) proj_destroy(source_crs) proj_destroy(target_crs) if transform == NULL: raise ProjError("Error creating Transformer from CRS.") return transform cdef class _Transformer(Base): def __cinit__(self): self._area_of_use = None self.type_name = "Unknown Transformer" self._operations = None self._source_crs = None self._target_crs = None def _initialize_from_projobj(self): self.proj_info = proj_pj_info(self.projobj) if self.proj_info.id == NULL: raise ProjError("Input is not a transformation.") cdef PJ_TYPE transformer_type = proj_get_type(self.projobj) self.type_name = _TRANSFORMER_TYPE_MAP[transformer_type] self._set_base_info() _clear_proj_error() @property def id(self): return self.proj_info.id @property def description(self): return self.proj_info.description @property def definition(self): return self.proj_info.definition @property def has_inverse(self): return self.proj_info.has_inverse == 1 @property def accuracy(self): return self.proj_info.accuracy @property def area_of_use(self): """ Returns ------- AreaOfUse: The area of use object with associated attributes. """ if self._area_of_use is not None: return self._area_of_use self._area_of_use = create_area_of_use(self.context, self.projobj) return self._area_of_use @property def source_crs(self): """ .. versionadded:: 3.3.0 Returns ------- _CRS | None: The source CRS of a CoordinateOperation. """ if self._source_crs is not None: return None if self._source_crs is False else self._source_crs cdef PJ * projobj = proj_get_source_crs(self.context, self.projobj) _clear_proj_error() if projobj == NULL: self._source_crs = False return None try: self._source_crs = _CRS(_to_wkt( self.context, projobj, version=WktVersion.WKT2_2019, pretty=False, )) finally: proj_destroy(projobj) return self._source_crs @property def target_crs(self): """ .. versionadded:: 3.3.0 Returns ------- _CRS | None: The target CRS of a CoordinateOperation. 
""" if self._target_crs is not None: return None if self._target_crs is False else self._target_crs cdef PJ * projobj = proj_get_target_crs(self.context, self.projobj) _clear_proj_error() if projobj == NULL: self._target_crs = False return None try: self._target_crs = _CRS(_to_wkt( self.context, projobj, version=WktVersion.WKT2_2019, pretty=False, )) finally: proj_destroy(projobj) return self._target_crs @property def operations(self): """ .. versionadded:: 2.4.0 Returns ------- tuple[CoordinateOperation]: The operations in a concatenated operation. """ if self._operations is not None: return self._operations self._operations = _get_concatenated_operations(self.context, self.projobj) return self._operations def get_last_used_operation(self): cdef PJ* last_used_operation = proj_trans_get_last_used_operation(self.projobj) if last_used_operation == NULL: raise ProjError( "Last used operation not found. " "This is likely due to not initiating a transform." ) cdef PJ_CONTEXT* context = NULL try: context = pyproj_context_create() except: proj_destroy(last_used_operation) raise proj_assign_context(last_used_operation, context) return _Transformer._from_pj( context, last_used_operation, False, ) @property def is_network_enabled(self): """ .. versionadded:: 3.0.0 Returns ------- bool: If the network is enabled. """ return proj_context_is_network_enabled(self.context) == 1 def to_proj4(self, version=ProjVersion.PROJ_5, bint pretty=False): """ Convert the projection to a PROJ string. .. versionadded:: 3.1.0 Parameters ---------- version: pyproj.enums.ProjVersion, default=pyproj.enums.ProjVersion.PROJ_5 The version of the PROJ string output. pretty: bool, default=False If True, it will set the output to be a multiline string. Returns ------- str: The PROJ string. """ return _to_proj4(self.context, self.projobj, version=version, pretty=pretty) @staticmethod def from_crs( const char* crs_from, const char* crs_to, bint always_xy=False, area_of_interest=None, str authority=None, str accuracy=None, allow_ballpark=None, bint force_over=False, only_best=None, ): """ Create a transformer from CRS objects """ cdef: PJ_AREA *pj_area_of_interest = NULL double west_lon_degree double south_lat_degree double east_lon_degree double north_lat_degree _Transformer transformer = _Transformer() try: if area_of_interest is not None: if not isinstance(area_of_interest, AreaOfInterest): raise ProjError( "Area of interest must be of the type " "pyproj.transformer.AreaOfInterest." 
) pj_area_of_interest = proj_area_create() west_lon_degree = area_of_interest.west_lon_degree south_lat_degree = area_of_interest.south_lat_degree east_lon_degree = area_of_interest.east_lon_degree north_lat_degree = area_of_interest.north_lat_degree proj_area_set_bbox( pj_area_of_interest, west_lon_degree, south_lat_degree, east_lon_degree, north_lat_degree, ) transformer.context = pyproj_context_create() transformer._context_manager = get_context_manager() transformer.projobj = proj_create_crs_to_crs( transformer.context, crs_from, crs_to, pj_area_of_interest, authority=authority, accuracy=accuracy, allow_ballpark=allow_ballpark, force_over=force_over, only_best=only_best, ) finally: if pj_area_of_interest != NULL: proj_area_destroy(pj_area_of_interest) transformer._init_from_crs(always_xy) return transformer @staticmethod cdef _Transformer _from_pj( PJ_CONTEXT* context, PJ *transform_pj, bint always_xy, ): """ Create a Transformer from a PJ* object """ cdef _Transformer transformer = _Transformer() transformer.context = context transformer._context_manager = get_context_manager() transformer.projobj = transform_pj if transformer.projobj == NULL: raise ProjError("Error creating Transformer.") transformer._init_from_crs(always_xy) return transformer @staticmethod def from_pipeline(const char *proj_pipeline): """ Create Transformer from a PROJ pipeline string. """ cdef _Transformer transformer = _Transformer() transformer.context = pyproj_context_create() transformer._context_manager = get_context_manager() auth_match = _AUTH_CODE_RE.match(proj_pipeline.strip()) if auth_match: # attempt to create coordinate operation from AUTH:CODE match_data = auth_match.groupdict() transformer.projobj = proj_create_from_database( transformer.context, cstrencode(match_data["authority"]), cstrencode(match_data["code"]), PJ_CATEGORY_COORDINATE_OPERATION, False, NULL, ) if transformer.projobj == NULL: # initialize projection transformer.projobj = proj_create( transformer.context, proj_pipeline, ) if transformer.projobj is NULL: raise ProjError(f"Invalid projection {proj_pipeline}.") transformer._initialize_from_projobj() return transformer def _set_always_xy(self): """ Setup the transformer so it has the axis order always in xy order. 
""" cdef PJ* always_xy_pj = proj_normalize_for_visualization( self.context, self.projobj, ) proj_destroy(self.projobj) self.projobj = always_xy_pj def _init_from_crs(self, bint always_xy): """ Finish initializing transformer properties from CRS objects """ if always_xy: self._set_always_xy() self._initialize_from_projobj() @cython.boundscheck(False) @cython.wraparound(False) def _transform( self, object inx, object iny, object inz, object intime, object direction, bint radians, bint errcheck, ): if self.id == "noop": return cdef: PJ_DIRECTION pj_direction = get_pj_direction(direction) PyBuffWriteManager xbuff = PyBuffWriteManager(inx) PyBuffWriteManager ybuff = PyBuffWriteManager(iny) PyBuffWriteManager zbuff PyBuffWriteManager tbuff Py_ssize_t buflenz Py_ssize_t buflent double* zz double* tt if inz is not None: zbuff = PyBuffWriteManager(inz) buflenz = zbuff.len zz = zbuff.data else: buflenz = xbuff.len zz = NULL if intime is not None: tbuff = PyBuffWriteManager(intime) buflent = tbuff.len tt = tbuff.data else: buflent = xbuff.len tt = NULL if not (xbuff.len == ybuff.len == buflenz == buflent): raise ProjError('x, y, z, and time must be same size if included.') cdef Py_ssize_t iii cdef int errno = 0 with nogil: # degrees to radians if not radians and proj_angular_input(self.projobj, pj_direction): for iii in range(xbuff.len): xbuff.data[iii] = xbuff.data[iii]*_DG2RAD ybuff.data[iii] = ybuff.data[iii]*_DG2RAD # radians to degrees elif radians and proj_degree_input(self.projobj, pj_direction): for iii in range(xbuff.len): xbuff.data[iii] = xbuff.data[iii]*_RAD2DG ybuff.data[iii] = ybuff.data[iii]*_RAD2DG proj_errno_reset(self.projobj) proj_trans_generic( self.projobj, pj_direction, xbuff.data, _DOUBLESIZE, xbuff.len, ybuff.data, _DOUBLESIZE, ybuff.len, zz, _DOUBLESIZE, xbuff.len, tt, _DOUBLESIZE, xbuff.len, ) errno = proj_errno(self.projobj) if errcheck and errno: with gil: raise ProjError( f"transform error: {proj_context_errno_string(self.context, errno)}" ) elif errcheck: with gil: if _get_proj_error() is not None: raise ProjError("transform error") # radians to degrees if not radians and proj_angular_output(self.projobj, pj_direction): for iii in range(xbuff.len): xbuff.data[iii] = xbuff.data[iii]*_RAD2DG ybuff.data[iii] = ybuff.data[iii]*_RAD2DG # degrees to radians elif radians and proj_degree_output(self.projobj, pj_direction): for iii in range(xbuff.len): xbuff.data[iii] = xbuff.data[iii]*_DG2RAD ybuff.data[iii] = ybuff.data[iii]*_DG2RAD _clear_proj_error() @cython.boundscheck(False) @cython.wraparound(False) def _transform_point( self, object inx, object iny, object inz, object intime, object direction, bint radians, bint errcheck, ): """ Optimized to transform a single point between two coordinate systems. """ cdef: double coord_x = inx double coord_y = iny double coord_z = 0 double coord_t = HUGE_VAL tuple expected_numeric_types = (int, float) # We do the type-checking internally here due to automatically # casting length-1 arrays to float that we don't want to return scalar for. 
# Ex: float(np.array([0])) works and we don't want to accept numpy arrays if not isinstance(inx, expected_numeric_types): raise TypeError("Scalar input expected for x") if not isinstance(iny, expected_numeric_types): raise TypeError("Scalar input expected for y") if inz is not None: if not isinstance(inz, expected_numeric_types): raise TypeError("Scalar input expected for z") coord_z = inz if intime is not None: if not isinstance(intime, expected_numeric_types): raise TypeError("Scalar input expected for t") coord_t = intime cdef tuple return_data if self.id == "noop": return_data = (inx, iny) if inz is not None: return_data += (inz,) if intime is not None: return_data += (intime,) return return_data cdef: PJ_DIRECTION pj_direction = get_pj_direction(direction) PJ_COORD projxyout PJ_COORD projxyin = proj_coord(coord_x, coord_y, coord_z, coord_t) with nogil: # degrees to radians if not radians and proj_angular_input(self.projobj, pj_direction): projxyin.uv.u *= _DG2RAD projxyin.uv.v *= _DG2RAD # radians to degrees elif radians and proj_degree_input(self.projobj, pj_direction): projxyin.uv.u *= _RAD2DG projxyin.uv.v *= _RAD2DG proj_errno_reset(self.projobj) projxyout = proj_trans(self.projobj, pj_direction, projxyin) errno = proj_errno(self.projobj) if errcheck and errno: with gil: raise ProjError( f"transform error: {proj_context_errno_string(self.context, errno)}" ) elif errcheck: with gil: if _clear_proj_error() is not None: raise ProjError("transform error") # radians to degrees if not radians and proj_angular_output(self.projobj, pj_direction): projxyout.xy.x *= _RAD2DG projxyout.xy.y *= _RAD2DG # degrees to radians elif radians and proj_degree_output(self.projobj, pj_direction): projxyout.xy.x *= _DG2RAD projxyout.xy.y *= _DG2RAD _clear_proj_error() return_data = (projxyout.xyzt.x, projxyout.xyzt.y) if inz is not None: return_data += (projxyout.xyzt.z,) if intime is not None: return_data += (projxyout.xyzt.t,) return return_data @cython.boundscheck(False) @cython.wraparound(False) def _transform_sequence( self, Py_ssize_t stride, object inseq, bint switch, object direction, bint time_3rd, bint radians, bint errcheck, ): # private function to itransform function if self.id == "noop": return cdef: PJ_DIRECTION pj_direction = get_pj_direction(direction) double *x double *y double *z double *tt if stride < 2: raise ProjError("coordinates must contain at least 2 values") cdef: PyBuffWriteManager coordbuff = PyBuffWriteManager(inseq) Py_ssize_t npts Py_ssize_t iii Py_ssize_t jjj int errno = 0 npts = coordbuff.len // stride with nogil: # degrees to radians if not radians and proj_angular_input(self.projobj, pj_direction): for iii in range(npts): jjj = stride * iii coordbuff.data[jjj] *= _DG2RAD coordbuff.data[jjj + 1] *= _DG2RAD # radians to degrees elif radians and proj_degree_input(self.projobj, pj_direction): for iii in range(npts): jjj = stride * iii coordbuff.data[jjj] *= _RAD2DG coordbuff.data[jjj + 1] *= _RAD2DG if not switch: x = coordbuff.data y = coordbuff.data + 1 else: x = coordbuff.data + 1 y = coordbuff.data # z coordinate if stride == 4 or (stride == 3 and not time_3rd): z = coordbuff.data + 2 else: z = NULL # time if stride == 3 and time_3rd: tt = coordbuff.data + 2 elif stride == 4: tt = coordbuff.data + 3 else: tt = NULL proj_errno_reset(self.projobj) proj_trans_generic( self.projobj, pj_direction, x, stride*_DOUBLESIZE, npts, y, stride*_DOUBLESIZE, npts, z, stride*_DOUBLESIZE, npts, tt, stride*_DOUBLESIZE, npts, ) errno = proj_errno(self.projobj) if errcheck and errno: with 
gil: raise ProjError( f"itransform error: {proj_context_errno_string(self.context, errno)}" ) elif errcheck: with gil: if _get_proj_error() is not None: raise ProjError("itransform error") # radians to degrees if not radians and proj_angular_output(self.projobj, pj_direction): for iii in range(npts): jjj = stride * iii coordbuff.data[jjj] *= _RAD2DG coordbuff.data[jjj + 1] *= _RAD2DG # degrees to radians elif radians and proj_degree_output(self.projobj, pj_direction): for iii in range(npts): jjj = stride * iii coordbuff.data[jjj] *= _DG2RAD coordbuff.data[jjj + 1] *= _DG2RAD _clear_proj_error() @cython.boundscheck(False) @cython.wraparound(False) def _transform_bounds( self, double left, double bottom, double right, double top, int densify_pts, bint radians, bint errcheck, object direction, ): cdef PJ_DIRECTION pj_direction = get_pj_direction(direction) if self.id == "noop" or pj_direction == PJ_IDENT: return (left, bottom, right, top) cdef: int errno = 0 bint success = True double out_left = left double out_bottom = bottom double out_right = right double out_top = top with nogil: # degrees to radians if not radians and proj_angular_input(self.projobj, pj_direction): left *= _DG2RAD bottom *= _DG2RAD right *= _DG2RAD top *= _DG2RAD # radians to degrees elif radians and proj_degree_input(self.projobj, pj_direction): left *= _RAD2DG bottom *= _RAD2DG right *= _RAD2DG top *= _RAD2DG proj_errno_reset(self.projobj) success = proj_trans_bounds( self.context, self.projobj, pj_direction, left, bottom, right, top, &out_left, &out_bottom, &out_right, &out_top, densify_pts, ) if not success or errcheck: errno = proj_errno(self.projobj) if errno: with gil: raise ProjError( "transform bounds error: " f"{proj_context_errno_string(self.context, errno)}" ) else: with gil: if _get_proj_error() is not None: raise ProjError("transform bounds error") # radians to degrees if not radians and proj_angular_output(self.projobj, pj_direction): out_left *= _RAD2DG out_bottom *= _RAD2DG out_right *= _RAD2DG out_top *= _RAD2DG # degrees to radians elif radians and proj_degree_output(self.projobj, pj_direction): out_left *= _DG2RAD out_bottom *= _DG2RAD out_right *= _DG2RAD out_top *= _DG2RAD _clear_proj_error() return out_left, out_bottom, out_right, out_top @cython.boundscheck(False) @cython.wraparound(False) def _get_factors(self, longitude, latitude, bint radians, bint errcheck): """ Calculates the projection factors PJ_FACTORS Designed to work with Proj class. Equivalent to `proj -S` command line. 
""" cdef PyBuffWriteManager lonbuff = PyBuffWriteManager(longitude) cdef PyBuffWriteManager latbuff = PyBuffWriteManager(latitude) if not lonbuff.len or not (lonbuff.len == latbuff.len): raise ProjError('longitude and latitude must be same size') # prepare the factors output meridional_scale = copy.copy(longitude) parallel_scale = copy.copy(longitude) areal_scale = copy.copy(longitude) angular_distortion = copy.copy(longitude) meridian_parallel_angle = copy.copy(longitude) meridian_convergence = copy.copy(longitude) tissot_semimajor = copy.copy(longitude) tissot_semiminor = copy.copy(longitude) dx_dlam = copy.copy(longitude) dx_dphi = copy.copy(longitude) dy_dlam = copy.copy(longitude) dy_dphi = copy.copy(longitude) cdef: PyBuffWriteManager meridional_scale_buff = PyBuffWriteManager( meridional_scale ) PyBuffWriteManager parallel_scale_buff = PyBuffWriteManager( parallel_scale ) PyBuffWriteManager areal_scale_buff = PyBuffWriteManager(areal_scale) PyBuffWriteManager angular_distortion_buff = PyBuffWriteManager( angular_distortion ) PyBuffWriteManager meridian_parallel_angle_buff = PyBuffWriteManager( meridian_parallel_angle ) PyBuffWriteManager meridian_convergence_buff = PyBuffWriteManager( meridian_convergence ) PyBuffWriteManager tissot_semimajor_buff = PyBuffWriteManager( tissot_semimajor ) PyBuffWriteManager tissot_semiminor_buff = PyBuffWriteManager( tissot_semiminor ) PyBuffWriteManager dx_dlam_buff = PyBuffWriteManager(dx_dlam) PyBuffWriteManager dx_dphi_buff = PyBuffWriteManager(dx_dphi) PyBuffWriteManager dy_dlam_buff = PyBuffWriteManager(dy_dlam) PyBuffWriteManager dy_dphi_buff = PyBuffWriteManager(dy_dphi) # calculate the factors PJ_COORD pj_coord = proj_coord(0, 0, 0, HUGE_VAL) PJ_FACTORS pj_factors int errno = 0 bint invalid_coord = 0 Py_ssize_t iii with nogil: for iii in range(lonbuff.len): pj_coord.uv.u = lonbuff.data[iii] pj_coord.uv.v = latbuff.data[iii] if not radians: pj_coord.uv.u *= _DG2RAD pj_coord.uv.v *= _DG2RAD # set both to HUGE_VAL if inf or nan proj_errno_reset(self.projobj) if pj_coord.uv.v == HUGE_VAL \ or pj_coord.uv.v != pj_coord.uv.v \ or pj_coord.uv.u == HUGE_VAL \ or pj_coord.uv.u != pj_coord.uv.u: invalid_coord = True else: invalid_coord = False pj_factors = proj_factors(self.projobj, pj_coord) errno = proj_errno(self.projobj) if errcheck and errno: with gil: raise ProjError( f"proj error: {proj_context_errno_string(self.context, errno)}" ) if errno or invalid_coord: meridional_scale_buff.data[iii] = HUGE_VAL parallel_scale_buff.data[iii] = HUGE_VAL areal_scale_buff.data[iii] = HUGE_VAL angular_distortion_buff.data[iii] = HUGE_VAL meridian_parallel_angle_buff.data[iii] = HUGE_VAL meridian_convergence_buff.data[iii] = HUGE_VAL tissot_semimajor_buff.data[iii] = HUGE_VAL tissot_semiminor_buff.data[iii] = HUGE_VAL dx_dlam_buff.data[iii] = HUGE_VAL dx_dphi_buff.data[iii] = HUGE_VAL dy_dlam_buff.data[iii] = HUGE_VAL dy_dphi_buff.data[iii] = HUGE_VAL else: meridional_scale_buff.data[iii] = pj_factors.meridional_scale parallel_scale_buff.data[iii] = pj_factors.parallel_scale areal_scale_buff.data[iii] = pj_factors.areal_scale angular_distortion_buff.data[iii] = ( pj_factors.angular_distortion * _RAD2DG ) meridian_parallel_angle_buff.data[iii] = ( pj_factors.meridian_parallel_angle * _RAD2DG ) meridian_convergence_buff.data[iii] = ( pj_factors.meridian_convergence * _RAD2DG ) tissot_semimajor_buff.data[iii] = pj_factors.tissot_semimajor tissot_semiminor_buff.data[iii] = pj_factors.tissot_semiminor dx_dlam_buff.data[iii] = pj_factors.dx_dlam 
dx_dphi_buff.data[iii] = pj_factors.dx_dphi dy_dlam_buff.data[iii] = pj_factors.dy_dlam dy_dphi_buff.data[iii] = pj_factors.dy_dphi _clear_proj_error() return Factors( meridional_scale=meridional_scale, parallel_scale=parallel_scale, areal_scale=areal_scale, angular_distortion=angular_distortion, meridian_parallel_angle=meridian_parallel_angle, meridian_convergence=meridian_convergence, tissot_semimajor=tissot_semimajor, tissot_semiminor=tissot_semiminor, dx_dlam=dx_dlam, dx_dphi=dx_dphi, dy_dlam=dy_dlam, dy_dphi=dy_dphi, ) pyproj-3.7.1/pyproj/_version.pyi000066400000000000000000000002041475425760300167500ustar00rootroot00000000000000PROJ_VERSION: tuple[int, int, int] PROJ_VERSION_STR: str PROJ_COMPILED_VERSION: tuple[int, int, int] PROJ_COMPILED_VERSION_STR: str pyproj-3.7.1/pyproj/_version.pyx000066400000000000000000000006021475425760300167710ustar00rootroot00000000000000include "proj.pxi" cdef PJ_INFO _PROJ_INFO = proj_info() PROJ_VERSION = (_PROJ_INFO.major, _PROJ_INFO.minor, _PROJ_INFO.patch) PROJ_VERSION_STR = f"{_PROJ_INFO.major}.{_PROJ_INFO.minor}.{_PROJ_INFO.patch}" PROJ_COMPILED_VERSION = (PROJ_VERSION_MAJOR, PROJ_VERSION_MINOR, PROJ_VERSION_PATCH) PROJ_COMPILED_VERSION_STR = f"{PROJ_VERSION_MAJOR}.{PROJ_VERSION_MINOR}.{PROJ_VERSION_PATCH}" pyproj-3.7.1/pyproj/aoi.py000066400000000000000000000065401475425760300155340ustar00rootroot00000000000000""" This module contains the structures related to areas of interest. """ from dataclasses import dataclass from typing import NamedTuple, Union from pyproj.utils import is_null @dataclass(frozen=True) class AreaOfInterest: """ .. versionadded:: 2.3.0 This is the area of interest for: - Transformations - Querying for CRS data. """ #: The west bound in degrees of the area of interest. west_lon_degree: float #: The south bound in degrees of the area of interest. south_lat_degree: float #: The east bound in degrees of the area of interest. east_lon_degree: float #: The north bound in degrees of the area of interest. north_lat_degree: float def __post_init__(self): if ( is_null(self.west_lon_degree) or is_null(self.south_lat_degree) or is_null(self.east_lon_degree) or is_null(self.north_lat_degree) ): raise ValueError("NaN or None values are not allowed.") class AreaOfUse(NamedTuple): """ .. versionadded:: 2.0.0 Area of Use for CRS, CoordinateOperation, or a Transformer. """ #: West bound of area of use. west: float #: South bound of area of use. south: float #: East bound of area of use. east: float #: North bound of area of use. north: float #: Name of area of use. name: str | None = None @property def bounds(self) -> tuple[float, float, float, float]: """ The bounds of the area of use. Returns ------- tuple[float, float, float, float] west, south, east, and north bounds. """ return self.west, self.south, self.east, self.north def __str__(self) -> str: return f"- name: {self.name}\n- bounds: {self.bounds}" @dataclass class BBox: """ Bounding box to check if data intersects/contains other bounding boxes. .. versionadded:: 3.0.0 """ #: West bound of bounding box. west: float #: South bound of bounding box. south: float #: East bound of bounding box. east: float #: North bound of bounding box. north: float def __post_init__(self): if ( is_null(self.west) or is_null(self.south) or is_null(self.east) or is_null(self.north) ): raise ValueError("NaN or None values are not allowed.") def intersects(self, other: Union["BBox", AreaOfUse]) -> bool: """ Parameters ---------- other: BBox The other BBox to use to check. 
Returns ------- bool: True if this BBox intersects the other bbox. """ return ( self.west < other.east and other.west < self.east and self.south < other.north and other.south < self.north ) def contains(self, other: Union["BBox", AreaOfUse]) -> bool: """ Parameters ---------- other: Union["BBox", AreaOfUse] The other BBox to use to check. Returns ------- bool: True if this BBox contains the other bbox. """ return ( other.west >= self.west and other.east <= self.east and other.south >= self.south and other.north <= self.north ) pyproj-3.7.1/pyproj/base.pxi000066400000000000000000000017551475425760300160510ustar00rootroot00000000000000from cpython.ref cimport PyObject from math import degrees, radians cdef double _DG2RAD = radians(1.) cdef double _RAD2DG = degrees(1.) cdef int _DOUBLESIZE = sizeof(double) cdef extern from "math.h" nogil: ctypedef enum: HUGE_VAL cdef extern from "Python.h": ctypedef enum: PyBUF_WRITABLE int PyObject_GetBuffer(PyObject *exporter, Py_buffer *view, int flags) void PyBuffer_Release(Py_buffer *view) cdef class PyBuffWriteManager: cdef Py_buffer buffer cdef double* data cdef public Py_ssize_t len def __cinit__(self): self.data = NULL def __init__(self, object data): if PyObject_GetBuffer(data, &self.buffer, PyBUF_WRITABLE) <> 0: raise BufferError("pyproj had a problem getting the buffer from data.") self.data = self.buffer.buf self.len = self.buffer.len // self.buffer.itemsize def __dealloc__(self): PyBuffer_Release(&self.buffer) self.data = NULL pyproj-3.7.1/pyproj/crs/000077500000000000000000000000001475425760300151745ustar00rootroot00000000000000pyproj-3.7.1/pyproj/crs/__init__.py000066400000000000000000000012311475425760300173020ustar00rootroot00000000000000""" This module interfaces with PROJ to produce a pythonic interface to the coordinate reference system (CRS) information through the CRS class. """ from pyproj._crs import ( # noqa: F401 pylint: disable=unused-import CoordinateOperation, CoordinateSystem, Datum, Ellipsoid, PrimeMeridian, is_proj, is_wkt, ) from pyproj.crs.crs import ( # noqa: F401 pylint: disable=unused-import CRS, BoundCRS, CompoundCRS, CustomConstructorCRS, DerivedGeographicCRS, GeocentricCRS, GeographicCRS, ProjectedCRS, VerticalCRS, ) from pyproj.exceptions import CRSError # noqa: F401 pylint: disable=unused-import pyproj-3.7.1/pyproj/crs/_cf1x8.py000066400000000000000000000655071475425760300166530ustar00rootroot00000000000000""" This module contains mappings necessary to convert from a CRS to a CF-1.8 compliant projection. 
http://cfconventions.org/cf-conventions/cf-conventions.html#appendix-grid-mappings """ import warnings from pyproj._crs import Datum, Ellipsoid, PrimeMeridian from pyproj.crs.coordinate_operation import ( AlbersEqualAreaConversion, AzimuthalEquidistantConversion, GeostationarySatelliteConversion, HotineObliqueMercatorBConversion, LambertAzimuthalEqualAreaConversion, LambertConformalConic1SPConversion, LambertConformalConic2SPConversion, LambertCylindricalEqualAreaConversion, LambertCylindricalEqualAreaScaleConversion, MercatorAConversion, MercatorBConversion, OrthographicConversion, PolarStereographicAConversion, PolarStereographicBConversion, PoleRotationNetCDFCFConversion, SinusoidalConversion, StereographicConversion, TransverseMercatorConversion, VerticalPerspectiveConversion, ) from pyproj.crs.datum import CustomDatum, CustomEllipsoid, CustomPrimeMeridian from pyproj.exceptions import CRSError def _horizontal_datum_from_params(cf_params): datum_name = cf_params.get("horizontal_datum_name") if datum_name and datum_name not in ("undefined", "unknown"): try: return Datum.from_name(datum_name) except CRSError: pass # step 1: build ellipsoid ellipsoid = None ellipsoid_name = cf_params.get("reference_ellipsoid_name") try: ellipsoid = CustomEllipsoid( name=ellipsoid_name or "undefined", semi_major_axis=cf_params.get("semi_major_axis"), semi_minor_axis=cf_params.get("semi_minor_axis"), inverse_flattening=cf_params.get("inverse_flattening"), radius=cf_params.get("earth_radius"), ) except CRSError: if ellipsoid_name and ellipsoid_name not in ("undefined", "unknown"): ellipsoid = Ellipsoid.from_name(ellipsoid_name) # step 2: build prime meridian prime_meridian = None prime_meridian_name = cf_params.get("prime_meridian_name") try: prime_meridian = CustomPrimeMeridian( name=prime_meridian_name or "undefined", longitude=cf_params["longitude_of_prime_meridian"], ) except KeyError: if prime_meridian_name and prime_meridian_name not in ("undefined", "unknown"): prime_meridian = PrimeMeridian.from_name(prime_meridian_name) # step 3: build datum if ellipsoid or prime_meridian: return CustomDatum( name=datum_name or "undefined", ellipsoid=ellipsoid or "WGS 84", prime_meridian=prime_meridian or "Greenwich", ) return None def _try_list_if_string(input_str): """ Attempt to convert string to list if it is a string """ if not isinstance(input_str, str): return input_str val_split = input_str.split(",") if len(val_split) > 1: return [float(sval.strip()) for sval in val_split] return input_str def _get_standard_parallels(standard_parallel): standard_parallel = _try_list_if_string(standard_parallel) try: first_parallel = float(standard_parallel) second_parallel = None except TypeError: first_parallel, second_parallel = standard_parallel return first_parallel, second_parallel def _albers_conical_equal_area(cf_params): """ http://cfconventions.org/cf-conventions/cf-conventions.html#_albers_equal_area """ first_parallel, second_parallel = _get_standard_parallels( cf_params["standard_parallel"] ) return AlbersEqualAreaConversion( latitude_first_parallel=first_parallel, latitude_second_parallel=second_parallel or 0.0, latitude_false_origin=cf_params.get("latitude_of_projection_origin", 0.0), longitude_false_origin=cf_params.get("longitude_of_central_meridian", 0.0), easting_false_origin=cf_params.get("false_easting", 0.0), northing_false_origin=cf_params.get("false_northing", 0.0), ) def _azimuthal_equidistant(cf_params): """ http://cfconventions.org/cf-conventions/cf-conventions.html#azimuthal-equidistant """ return 
AzimuthalEquidistantConversion( latitude_natural_origin=cf_params.get("latitude_of_projection_origin", 0.0), longitude_natural_origin=cf_params.get("longitude_of_projection_origin", 0.0), false_easting=cf_params.get("false_easting", 0.0), false_northing=cf_params.get("false_northing", 0.0), ) def _geostationary(cf_params): """ http://cfconventions.org/cf-conventions/cf-conventions.html#_geostationary_projection """ try: sweep_angle_axis = cf_params["sweep_angle_axis"] except KeyError: sweep_angle_axis = {"x": "y", "y": "x"}[cf_params["fixed_angle_axis"].lower()] return GeostationarySatelliteConversion( sweep_angle_axis=sweep_angle_axis, satellite_height=cf_params["perspective_point_height"], latitude_natural_origin=cf_params.get("latitude_of_projection_origin", 0.0), longitude_natural_origin=cf_params.get("longitude_of_projection_origin", 0.0), false_easting=cf_params.get("false_easting", 0.0), false_northing=cf_params.get("false_northing", 0.0), ) def _lambert_azimuthal_equal_area(cf_params): """ http://cfconventions.org/cf-conventions/cf-conventions.html#lambert-azimuthal-equal-area """ return LambertAzimuthalEqualAreaConversion( latitude_natural_origin=cf_params.get("latitude_of_projection_origin", 0.0), longitude_natural_origin=cf_params.get("longitude_of_projection_origin", 0.0), false_easting=cf_params.get("false_easting", 0.0), false_northing=cf_params.get("false_northing", 0.0), ) def _lambert_conformal_conic(cf_params): """ http://cfconventions.org/cf-conventions/cf-conventions.html#_lambert_conformal """ first_parallel, second_parallel = _get_standard_parallels( cf_params["standard_parallel"] ) if second_parallel is not None: return LambertConformalConic2SPConversion( latitude_first_parallel=first_parallel, latitude_second_parallel=second_parallel, latitude_false_origin=cf_params.get("latitude_of_projection_origin", 0.0), longitude_false_origin=cf_params.get("longitude_of_central_meridian", 0.0), easting_false_origin=cf_params.get("false_easting", 0.0), northing_false_origin=cf_params.get("false_northing", 0.0), ) return LambertConformalConic1SPConversion( latitude_natural_origin=first_parallel, longitude_natural_origin=cf_params.get("longitude_of_central_meridian", 0.0), false_easting=cf_params.get("false_easting", 0.0), false_northing=cf_params.get("false_northing", 0.0), ) def _lambert_cylindrical_equal_area(cf_params): """ http://cfconventions.org/cf-conventions/cf-conventions.html#_lambert_cylindrical_equal_area """ if "scale_factor_at_projection_origin" in cf_params: return LambertCylindricalEqualAreaScaleConversion( scale_factor_natural_origin=cf_params["scale_factor_at_projection_origin"], longitude_natural_origin=cf_params.get( "longitude_of_central_meridian", 0.0 ), false_easting=cf_params.get("false_easting", 0.0), false_northing=cf_params.get("false_northing", 0.0), ) return LambertCylindricalEqualAreaConversion( latitude_first_parallel=cf_params.get("standard_parallel", 0.0), longitude_natural_origin=cf_params.get("longitude_of_central_meridian", 0.0), false_easting=cf_params.get("false_easting", 0.0), false_northing=cf_params.get("false_northing", 0.0), ) def _mercator(cf_params): """ http://cfconventions.org/cf-conventions/cf-conventions.html#_mercator """ if "scale_factor_at_projection_origin" in cf_params: return MercatorAConversion( latitude_natural_origin=cf_params.get("standard_parallel", 0.0), longitude_natural_origin=cf_params.get( "longitude_of_projection_origin", 0.0 ), false_easting=cf_params.get("false_easting", 0.0), 
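# --- Illustrative sketch, not part of the original module: the Lambert
# Conformal Conic builder above picks the 1SP or 2SP EPSG method depending on
# whether a second standard parallel is present. Both attribute dictionaries
# are hypothetical.
lcc_one_sp = _lambert_conformal_conic(
    {"standard_parallel": 25.0, "longitude_of_central_meridian": -95.0}
)
lcc_two_sp = _lambert_conformal_conic(
    {"standard_parallel": (33.0, 45.0), "longitude_of_central_meridian": -95.0}
)
print(lcc_one_sp.method_name)  # Lambert Conic Conformal (1SP)
print(lcc_two_sp.method_name)  # Lambert Conic Conformal (2SP)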
false_northing=cf_params.get("false_northing", 0.0), scale_factor_natural_origin=cf_params["scale_factor_at_projection_origin"], ) return MercatorBConversion( latitude_first_parallel=cf_params.get("standard_parallel", 0.0), longitude_natural_origin=cf_params.get("longitude_of_projection_origin", 0.0), false_easting=cf_params.get("false_easting", 0.0), false_northing=cf_params.get("false_northing", 0.0), ) def _oblique_mercator(cf_params): """ http://cfconventions.org/cf-conventions/cf-conventions.html#_oblique_mercator """ return HotineObliqueMercatorBConversion( latitude_projection_centre=cf_params["latitude_of_projection_origin"], longitude_projection_centre=cf_params["longitude_of_projection_origin"], azimuth_projection_centre=cf_params["azimuth_of_central_line"], angle_from_rectified_to_skew_grid=0.0, scale_factor_projection_centre=cf_params.get( "scale_factor_at_projection_origin", 1.0 ), easting_projection_centre=cf_params.get("false_easting", 0.0), northing_projection_centre=cf_params.get("false_northing", 0.0), ) def _orthographic(cf_params): """ http://cfconventions.org/cf-conventions/cf-conventions.html#_orthographic """ return OrthographicConversion( latitude_natural_origin=cf_params.get("latitude_of_projection_origin", 0.0), longitude_natural_origin=cf_params.get("longitude_of_projection_origin", 0.0), false_easting=cf_params.get("false_easting", 0.0), false_northing=cf_params.get("false_northing", 0.0), ) def _polar_stereographic(cf_params): """ http://cfconventions.org/cf-conventions/cf-conventions.html#polar-stereographic """ if "standard_parallel" in cf_params: return PolarStereographicBConversion( latitude_standard_parallel=cf_params["standard_parallel"], longitude_origin=cf_params["straight_vertical_longitude_from_pole"], false_easting=cf_params.get("false_easting", 0.0), false_northing=cf_params.get("false_northing", 0.0), ) return PolarStereographicAConversion( latitude_natural_origin=cf_params["latitude_of_projection_origin"], longitude_natural_origin=cf_params["straight_vertical_longitude_from_pole"], false_easting=cf_params.get("false_easting", 0.0), false_northing=cf_params.get("false_northing", 0.0), scale_factor_natural_origin=cf_params.get( "scale_factor_at_projection_origin", 1.0 ), ) def _sinusoidal(cf_params): """ http://cfconventions.org/cf-conventions/cf-conventions.html#_sinusoidal """ return SinusoidalConversion( longitude_natural_origin=cf_params.get("longitude_of_projection_origin", 0.0), false_easting=cf_params.get("false_easting", 0.0), false_northing=cf_params.get("false_northing", 0.0), ) def _stereographic(cf_params): """ http://cfconventions.org/cf-conventions/cf-conventions.html#_stereographic """ return StereographicConversion( latitude_natural_origin=cf_params.get("latitude_of_projection_origin", 0.0), longitude_natural_origin=cf_params.get("longitude_of_projection_origin", 0.0), false_easting=cf_params.get("false_easting", 0.0), false_northing=cf_params.get("false_northing", 0.0), scale_factor_natural_origin=cf_params.get( "scale_factor_at_projection_origin", 1.0 ), ) def _transverse_mercator(cf_params): """ http://cfconventions.org/cf-conventions/cf-conventions.html#_transverse_mercator """ return TransverseMercatorConversion( latitude_natural_origin=cf_params.get("latitude_of_projection_origin", 0.0), longitude_natural_origin=cf_params.get("longitude_of_central_meridian", 0.0), false_easting=cf_params.get("false_easting", 0.0), false_northing=cf_params.get("false_northing", 0.0), scale_factor_natural_origin=cf_params.get( 
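# --- Illustrative sketch, not part of the original module: the polar
# stereographic builder above selects variant B when a standard parallel is
# given and variant A when the scale-factor form is used. Hypothetical values.
ps_variant_b = _polar_stereographic(
    {
        "standard_parallel": -71.0,
        "straight_vertical_longitude_from_pole": 0.0,
    }
)
ps_variant_a = _polar_stereographic(
    {
        "latitude_of_projection_origin": 90.0,
        "straight_vertical_longitude_from_pole": -45.0,
        "scale_factor_at_projection_origin": 0.994,
    }
)
print(ps_variant_b.method_name)  # Polar Stereographic (variant B)
print(ps_variant_a.method_name)  # Polar Stereographic (variant A)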
"scale_factor_at_central_meridian", 1.0 ), ) def _vertical_perspective(cf_params): """ http://cfconventions.org/cf-conventions/cf-conventions.html#vertical-perspective """ return VerticalPerspectiveConversion( viewpoint_height=cf_params["perspective_point_height"], latitude_topocentric_origin=cf_params.get("latitude_of_projection_origin", 0.0), longitude_topocentric_origin=cf_params.get( "longitude_of_projection_origin", 0.0 ), false_easting=cf_params.get("false_easting", 0.0), false_northing=cf_params.get("false_northing", 0.0), ) def _rotated_latitude_longitude(cf_params): """ http://cfconventions.org/cf-conventions/cf-conventions.html#_rotated_pole """ return PoleRotationNetCDFCFConversion( grid_north_pole_latitude=cf_params["grid_north_pole_latitude"], grid_north_pole_longitude=cf_params["grid_north_pole_longitude"], north_pole_grid_longitude=cf_params.get("north_pole_grid_longitude", 0.0), ) _GRID_MAPPING_NAME_MAP = { "albers_conical_equal_area": _albers_conical_equal_area, "azimuthal_equidistant": _azimuthal_equidistant, "geostationary": _geostationary, "lambert_azimuthal_equal_area": _lambert_azimuthal_equal_area, "lambert_conformal_conic": _lambert_conformal_conic, "lambert_cylindrical_equal_area": _lambert_cylindrical_equal_area, "mercator": _mercator, "oblique_mercator": _oblique_mercator, "orthographic": _orthographic, "polar_stereographic": _polar_stereographic, "sinusoidal": _sinusoidal, "stereographic": _stereographic, "transverse_mercator": _transverse_mercator, "vertical_perspective": _vertical_perspective, } _GEOGRAPHIC_GRID_MAPPING_NAME_MAP = { "rotated_latitude_longitude": _rotated_latitude_longitude } def _to_dict(operation): param_dict = {} for param in operation.params: param_dict[param.name.lower().replace(" ", "_")] = param.value return param_dict def _albers_conical_equal_area__to_cf(conversion): """ http://cfconventions.org/cf-conventions/cf-conventions.html#_albers_equal_area """ params = _to_dict(conversion) return { "grid_mapping_name": "albers_conical_equal_area", "standard_parallel": ( params["latitude_of_1st_standard_parallel"], params["latitude_of_2nd_standard_parallel"], ), "latitude_of_projection_origin": params["latitude_of_false_origin"], "longitude_of_central_meridian": params["longitude_of_false_origin"], "false_easting": params["easting_at_false_origin"], "false_northing": params["northing_at_false_origin"], } def _azimuthal_equidistant__to_cf(conversion): """ http://cfconventions.org/cf-conventions/cf-conventions.html#azimuthal-equidistant """ params = _to_dict(conversion) return { "grid_mapping_name": "azimuthal_equidistant", "latitude_of_projection_origin": params["latitude_of_natural_origin"], "longitude_of_projection_origin": params["longitude_of_natural_origin"], "false_easting": params["false_easting"], "false_northing": params["false_northing"], } def _geostationary__to_cf(conversion): """ http://cfconventions.org/cf-conventions/cf-conventions.html#_geostationary_projection """ params = _to_dict(conversion) sweep_angle_axis = "y" if conversion.method_name.lower().replace(" ", "_").endswith("(sweep_x)"): sweep_angle_axis = "x" return { "grid_mapping_name": "geostationary", "sweep_angle_axis": sweep_angle_axis, "perspective_point_height": params["satellite_height"], # geostationary satellites orbit around equator # so latitude_of_natural_origin is often left off and assumed to be 0.0 "latitude_of_projection_origin": params.get("latitude_of_natural_origin", 0.0), "longitude_of_projection_origin": params["longitude_of_natural_origin"], 
"false_easting": params["false_easting"], "false_northing": params["false_northing"], } def _lambert_azimuthal_equal_area__to_cf(conversion): """ http://cfconventions.org/cf-conventions/cf-conventions.html#lambert-azimuthal-equal-area """ params = _to_dict(conversion) return { "grid_mapping_name": "lambert_azimuthal_equal_area", "latitude_of_projection_origin": params["latitude_of_natural_origin"], "longitude_of_projection_origin": params["longitude_of_natural_origin"], "false_easting": params["false_easting"], "false_northing": params["false_northing"], } def _lambert_conformal_conic__to_cf(conversion): """ http://cfconventions.org/cf-conventions/cf-conventions.html#_lambert_conformal """ params = _to_dict(conversion) if conversion.method_name.lower().endswith("(2sp)"): return { "grid_mapping_name": "lambert_conformal_conic", "standard_parallel": ( params["latitude_of_1st_standard_parallel"], params["latitude_of_2nd_standard_parallel"], ), "latitude_of_projection_origin": params["latitude_of_false_origin"], "longitude_of_central_meridian": params["longitude_of_false_origin"], "false_easting": params["easting_at_false_origin"], "false_northing": params["northing_at_false_origin"], } return { "grid_mapping_name": "lambert_conformal_conic", "standard_parallel": params["latitude_of_natural_origin"], "longitude_of_central_meridian": params["longitude_of_natural_origin"], "false_easting": params["false_easting"], "false_northing": params["false_northing"], } def _lambert_cylindrical_equal_area__to_cf(conversion): """ http://cfconventions.org/cf-conventions/cf-conventions.html#_lambert_cylindrical_equal_area """ params = _to_dict(conversion) return { "grid_mapping_name": "lambert_cylindrical_equal_area", "standard_parallel": params["latitude_of_1st_standard_parallel"], "longitude_of_central_meridian": params["longitude_of_natural_origin"], "false_easting": params["false_easting"], "false_northing": params["false_northing"], } def _mercator__to_cf(conversion): """ http://cfconventions.org/cf-conventions/cf-conventions.html#_mercator """ params = _to_dict(conversion) if conversion.method_name.lower().replace(" ", "_").endswith("(variant_a)"): return { "grid_mapping_name": "mercator", "standard_parallel": params["latitude_of_natural_origin"], "longitude_of_projection_origin": params["longitude_of_natural_origin"], "false_easting": params["false_easting"], "false_northing": params["false_northing"], "scale_factor_at_projection_origin": params[ "scale_factor_at_natural_origin" ], } return { "grid_mapping_name": "mercator", "standard_parallel": params["latitude_of_1st_standard_parallel"], "longitude_of_projection_origin": params["longitude_of_natural_origin"], "false_easting": params["false_easting"], "false_northing": params["false_northing"], } def _oblique_mercator__to_cf(conversion): """ http://cfconventions.org/cf-conventions/cf-conventions.html#_oblique_mercator """ params = _to_dict(conversion) if params["angle_from_rectified_to_skew_grid"] != 0: warnings.warn( "angle from rectified to skew grid parameter lost in conversion to CF" ) try: azimuth_of_central_line = params["azimuth_of_initial_line"] except KeyError: azimuth_of_central_line = params["azimuth_at_projection_centre"] try: scale_factor_at_projection_origin = params["scale_factor_on_initial_line"] except KeyError: scale_factor_at_projection_origin = params["scale_factor_at_projection_centre"] return { "grid_mapping_name": "oblique_mercator", "latitude_of_projection_origin": params["latitude_of_projection_centre"], 
"longitude_of_projection_origin": params["longitude_of_projection_centre"], "azimuth_of_central_line": azimuth_of_central_line, "scale_factor_at_projection_origin": scale_factor_at_projection_origin, "false_easting": params["easting_at_projection_centre"], "false_northing": params["northing_at_projection_centre"], } def _orthographic__to_cf(conversion): """ http://cfconventions.org/cf-conventions/cf-conventions.html#_orthographic """ params = _to_dict(conversion) return { "grid_mapping_name": "orthographic", "latitude_of_projection_origin": params["latitude_of_natural_origin"], "longitude_of_projection_origin": params["longitude_of_natural_origin"], "false_easting": params["false_easting"], "false_northing": params["false_northing"], } def _polar_stereographic__to_cf(conversion): """ http://cfconventions.org/cf-conventions/cf-conventions.html#polar-stereographic """ params = _to_dict(conversion) if conversion.method_name.lower().endswith("(variant b)"): return { "grid_mapping_name": "polar_stereographic", "standard_parallel": params["latitude_of_standard_parallel"], "straight_vertical_longitude_from_pole": params["longitude_of_origin"], "false_easting": params["false_easting"], "false_northing": params["false_northing"], } return { "grid_mapping_name": "polar_stereographic", "latitude_of_projection_origin": params["latitude_of_natural_origin"], "straight_vertical_longitude_from_pole": params["longitude_of_natural_origin"], "false_easting": params["false_easting"], "false_northing": params["false_northing"], "scale_factor_at_projection_origin": params["scale_factor_at_natural_origin"], } def _sinusoidal__to_cf(conversion): """ http://cfconventions.org/cf-conventions/cf-conventions.html#_sinusoidal """ params = _to_dict(conversion) return { "grid_mapping_name": "sinusoidal", "longitude_of_projection_origin": params["longitude_of_natural_origin"], "false_easting": params["false_easting"], "false_northing": params["false_northing"], } def _stereographic__to_cf(conversion): """ http://cfconventions.org/cf-conventions/cf-conventions.html#_stereographic """ params = _to_dict(conversion) return { "grid_mapping_name": "stereographic", "latitude_of_projection_origin": params["latitude_of_natural_origin"], "longitude_of_projection_origin": params["longitude_of_natural_origin"], "false_easting": params["false_easting"], "false_northing": params["false_northing"], "scale_factor_at_projection_origin": params["scale_factor_at_natural_origin"], } def _transverse_mercator__to_cf(conversion): """ http://cfconventions.org/cf-conventions/cf-conventions.html#_transverse_mercator """ params = _to_dict(conversion) return { "grid_mapping_name": "transverse_mercator", "latitude_of_projection_origin": params["latitude_of_natural_origin"], "longitude_of_central_meridian": params["longitude_of_natural_origin"], "false_easting": params["false_easting"], "false_northing": params["false_northing"], "scale_factor_at_central_meridian": params["scale_factor_at_natural_origin"], } def _vertical_perspective__to_cf(conversion): """ http://cfconventions.org/cf-conventions/cf-conventions.html#vertical-perspective """ params = _to_dict(conversion) return { "grid_mapping_name": "vertical_perspective", "perspective_point_height": params["viewpoint_height"], "latitude_of_projection_origin": params["latitude_of_topocentric_origin"], "longitude_of_projection_origin": params["longitude_of_topocentric_origin"], "false_easting": params["false_easting"], "false_northing": params["false_northing"], } def 
_rotated_latitude_longitude__to_cf(conversion): """ http://cfconventions.org/cf-conventions/cf-conventions.html#_rotated_pole """ params = _to_dict(conversion) return { "grid_mapping_name": "rotated_latitude_longitude", "grid_north_pole_latitude": params["o_lat_p"], # https://github.com/pyproj4/pyproj/issues/927 "grid_north_pole_longitude": params["lon_0"] - 180, "north_pole_grid_longitude": params["o_lon_p"], } def _pole_rotation_netcdf__to_cf(conversion): """ http://cfconventions.org/cf-conventions/cf-conventions.html#_rotated_pole https://github.com/OSGeo/PROJ/pull/2835 """ params = _to_dict(conversion) return { "grid_mapping_name": "rotated_latitude_longitude", "grid_north_pole_latitude": params[ "grid_north_pole_latitude_(netcdf_cf_convention)" ], "grid_north_pole_longitude": params[ "grid_north_pole_longitude_(netcdf_cf_convention)" ], "north_pole_grid_longitude": params[ "north_pole_grid_longitude_(netcdf_cf_convention)" ], } _INVERSE_GRID_MAPPING_NAME_MAP = { "albers_equal_area": _albers_conical_equal_area__to_cf, "modified_azimuthal_equidistant": _azimuthal_equidistant__to_cf, "azimuthal_equidistant": _azimuthal_equidistant__to_cf, "geostationary_satellite_(sweep_x)": _geostationary__to_cf, "geostationary_satellite_(sweep_y)": _geostationary__to_cf, "lambert_azimuthal_equal_area": _lambert_azimuthal_equal_area__to_cf, "lambert_conic_conformal_(2sp)": _lambert_conformal_conic__to_cf, "lambert_conic_conformal_(1sp)": _lambert_conformal_conic__to_cf, "lambert_cylindrical_equal_area": _lambert_cylindrical_equal_area__to_cf, "mercator_(variant_a)": _mercator__to_cf, "mercator_(variant_b)": _mercator__to_cf, "hotine_oblique_mercator_(variant_b)": _oblique_mercator__to_cf, "orthographic": _orthographic__to_cf, "polar_stereographic_(variant_a)": _polar_stereographic__to_cf, "polar_stereographic_(variant_b)": _polar_stereographic__to_cf, "sinusoidal": _sinusoidal__to_cf, "stereographic": _stereographic__to_cf, "transverse_mercator": _transverse_mercator__to_cf, "vertical_perspective": _vertical_perspective__to_cf, } _INVERSE_GEOGRAPHIC_GRID_MAPPING_NAME_MAP = { "proj ob_tran o_proj=longlat": _rotated_latitude_longitude__to_cf, "proj ob_tran o_proj=lonlat": _rotated_latitude_longitude__to_cf, "proj ob_tran o_proj=latlon": _rotated_latitude_longitude__to_cf, "proj ob_tran o_proj=latlong": _rotated_latitude_longitude__to_cf, "pole rotation (netcdf cf convention)": _pole_rotation_netcdf__to_cf, } pyproj-3.7.1/pyproj/crs/coordinate_operation.py000066400000000000000000001654431475425760300217720ustar00rootroot00000000000000""" This module is for building operations to be used when building a CRS. :ref:`operations` """ # pylint: disable=too-many-lines import warnings from typing import Any, Optional from pyproj._crs import CoordinateOperation from pyproj._version import PROJ_VERSION from pyproj.exceptions import CRSError class AlbersEqualAreaConversion(CoordinateOperation): """ .. versionadded:: 2.5.0 Class for constructing the Albers Equal Area Conversion. :ref:`PROJ docs ` """ def __new__( cls, latitude_first_parallel: float, latitude_second_parallel: float, latitude_false_origin: float = 0.0, longitude_false_origin: float = 0.0, easting_false_origin: float = 0.0, northing_false_origin: float = 0.0, ): """ Parameters ---------- latitude_first_parallel: float First standard parallel (lat_1). latitude_second_parallel: float Second standard parallel (lat_2). latitude_false_origin: float, default=0.0 Latitude of projection center (lat_0). 
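# --- Illustrative sketch, not part of the original module: a CF rotated-pole
# mapping is expected to round-trip through the tables above (builder on the
# way in, _pole_rotation_netcdf__to_cf or the ob_tran variant on the way out).
# The pole position values are hypothetical.
from pyproj.crs import CRS

rotated = CRS.from_cf(
    {
        "grid_mapping_name": "rotated_latitude_longitude",
        "grid_north_pole_latitude": 32.5,
        "grid_north_pole_longitude": -170.0,
        "north_pole_grid_longitude": 0.0,
    }
)
print(rotated.to_cf()["grid_mapping_name"])  # rotated_latitude_longitude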
longitude_false_origin: float, default=0.0 Longitude of projection center (lon_0). easting_false_origin: float, default=0.0 False easting (x_0). northing_false_origin: float, default=0.0 False northing (y_0). """ aea_json = { "$schema": "https://proj.org/schemas/v0.2/projjson.schema.json", "type": "Conversion", "name": "unknown", "method": { "name": "Albers Equal Area", "id": {"authority": "EPSG", "code": 9822}, }, "parameters": [ { "name": "Latitude of false origin", "value": latitude_false_origin, "unit": "degree", "id": {"authority": "EPSG", "code": 8821}, }, { "name": "Longitude of false origin", "value": longitude_false_origin, "unit": "degree", "id": {"authority": "EPSG", "code": 8822}, }, { "name": "Latitude of 1st standard parallel", "value": latitude_first_parallel, "unit": "degree", "id": {"authority": "EPSG", "code": 8823}, }, { "name": "Latitude of 2nd standard parallel", "value": latitude_second_parallel, "unit": "degree", "id": {"authority": "EPSG", "code": 8824}, }, { "name": "Easting at false origin", "value": easting_false_origin, "unit": { "type": "LinearUnit", "name": "Metre", "conversion_factor": 1, }, "id": {"authority": "EPSG", "code": 8826}, }, { "name": "Northing at false origin", "value": northing_false_origin, "unit": { "type": "LinearUnit", "name": "Metre", "conversion_factor": 1, }, "id": {"authority": "EPSG", "code": 8827}, }, ], } return cls.from_json_dict(aea_json) class AzimuthalEquidistantConversion(CoordinateOperation): """ .. versionadded:: 2.5.0 AzumuthalEquidistantConversion .. versionadded:: 3.2.0 AzimuthalEquidistantConversion Class for constructing the Modified Azimuthal Equidistant conversion. :ref:`PROJ docs ` """ def __new__( cls, latitude_natural_origin: float = 0.0, longitude_natural_origin: float = 0.0, false_easting: float = 0.0, false_northing: float = 0.0, ): """ Parameters ---------- latitude_natural_origin: float, default=0.0 Latitude of projection center (lat_0). longitude_natural_origin: float, default=0.0 Longitude of projection center (lon_0). false_easting: float, default=0.0 False easting (x_0). false_northing: float, default=0.0 False northing (y_0). """ aeqd_json = { "$schema": "https://proj.org/schemas/v0.2/projjson.schema.json", "type": "Conversion", "name": "unknown", "method": { "name": "Modified Azimuthal Equidistant", "id": {"authority": "EPSG", "code": 9832}, }, "parameters": [ { "name": "Latitude of natural origin", "value": latitude_natural_origin, "unit": "degree", "id": {"authority": "EPSG", "code": 8801}, }, { "name": "Longitude of natural origin", "value": longitude_natural_origin, "unit": "degree", "id": {"authority": "EPSG", "code": 8802}, }, { "name": "False easting", "value": false_easting, "unit": "metre", "id": {"authority": "EPSG", "code": 8806}, }, { "name": "False northing", "value": false_northing, "unit": "metre", "id": {"authority": "EPSG", "code": 8807}, }, ], } return cls.from_json_dict(aeqd_json) class GeostationarySatelliteConversion(CoordinateOperation): """ .. versionadded:: 2.5.0 Class for constructing the Geostationary Satellite conversion. :ref:`PROJ docs ` """ def __new__( cls, sweep_angle_axis: str, satellite_height: float, latitude_natural_origin: float = 0.0, longitude_natural_origin: float = 0.0, false_easting: float = 0.0, false_northing: float = 0.0, ): """ Parameters ---------- sweep_angle_axis: str Sweep angle axis of the viewing instrument. Valid options are “X” and “Y”. satellite_height: float Satellite height. 
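# --- Illustrative sketch, not part of the original module: a conversion built
# this way is normally handed to pyproj.crs.ProjectedCRS. The parallels and
# origin below are hypothetical, and geodetic_crs accepts anything that
# CRS.from_user_input understands.
from pyproj.crs import ProjectedCRS

aea = AlbersEqualAreaConversion(
    latitude_first_parallel=29.5,
    latitude_second_parallel=45.5,
    latitude_false_origin=23.0,
    longitude_false_origin=-96.0,
)
aea_crs = ProjectedCRS(conversion=aea, geodetic_crs="EPSG:4269")
print(aea_crs.coordinate_operation.method_name)  # Albers Equal Area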
latitude_natural_origin: float, default=0.0 Latitude of projection center (lat_0). longitude_natural_origin: float, default=0.0 Longitude of projection center (lon_0). false_easting: float, default=0.0 False easting (x_0). false_northing: float, default=0.0 False northing (y_0). """ sweep_angle_axis = sweep_angle_axis.strip().upper() valid_sweep_axis = ("X", "Y") if sweep_angle_axis not in valid_sweep_axis: raise CRSError(f"sweep_angle_axis only supports {valid_sweep_axis}") if latitude_natural_origin != 0: warnings.warn( "The latitude of natural origin (lat_0) is not used " "within PROJ. It is only supported for exporting to " "the WKT or PROJ JSON formats." ) geos_json = { "$schema": "https://proj.org/schemas/v0.2/projjson.schema.json", "type": "Conversion", "name": "unknown", "method": {"name": f"Geostationary Satellite (Sweep {sweep_angle_axis})"}, "parameters": [ { "name": "Satellite height", "value": satellite_height, "unit": "metre", }, { "name": "Latitude of natural origin", "value": latitude_natural_origin, "unit": "degree", "id": {"authority": "EPSG", "code": 8801}, }, { "name": "Longitude of natural origin", "value": longitude_natural_origin, "unit": "degree", "id": {"authority": "EPSG", "code": 8802}, }, { "name": "False easting", "value": false_easting, "unit": "metre", "id": {"authority": "EPSG", "code": 8806}, }, { "name": "False northing", "value": false_northing, "unit": "metre", "id": {"authority": "EPSG", "code": 8807}, }, ], } return cls.from_json_dict(geos_json) class LambertAzimuthalEqualAreaConversion(CoordinateOperation): """ .. versionadded:: 2.5.0 LambertAzumuthalEqualAreaConversion .. versionadded:: 3.2.0 LambertAzimuthalEqualAreaConversion Class for constructing the Lambert Azimuthal Equal Area conversion. :ref:`PROJ docs ` """ def __new__( cls, latitude_natural_origin: float = 0.0, longitude_natural_origin: float = 0.0, false_easting: float = 0.0, false_northing: float = 0.0, ): """ Parameters ---------- latitude_natural_origin: float, default=0.0 Latitude of projection center (lat_0). longitude_natural_origin: float, default=0.0 Longitude of projection center (lon_0). false_easting: float, default=0.0 False easting (x_0). false_northing: float, default=0.0 False northing (y_0). """ laea_json = { "$schema": "https://proj.org/schemas/v0.2/projjson.schema.json", "type": "Conversion", "name": "unknown", "method": { "name": "Lambert Azimuthal Equal Area", "id": {"authority": "EPSG", "code": 9820}, }, "parameters": [ { "name": "Latitude of natural origin", "value": latitude_natural_origin, "unit": "degree", "id": {"authority": "EPSG", "code": 8801}, }, { "name": "Longitude of natural origin", "value": longitude_natural_origin, "unit": "degree", "id": {"authority": "EPSG", "code": 8802}, }, { "name": "False easting", "value": false_easting, "unit": "metre", "id": {"authority": "EPSG", "code": 8806}, }, { "name": "False northing", "value": false_northing, "unit": "metre", "id": {"authority": "EPSG", "code": 8807}, }, ], } return cls.from_json_dict(laea_json) class LambertConformalConic2SPConversion(CoordinateOperation): """ .. versionadded:: 2.5.0 Class for constructing the Lambert Conformal Conic 2SP conversion. :ref:`PROJ docs ` """ def __new__( cls, latitude_first_parallel: float, latitude_second_parallel: float, latitude_false_origin: float = 0.0, longitude_false_origin: float = 0.0, easting_false_origin: float = 0.0, northing_false_origin: float = 0.0, ): """ Parameters ---------- latitude_first_parallel: float Latitude of 1st standard parallel (lat_1). 
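# --- Illustrative sketch, not part of the original module: a GOES-style
# geostationary conversion. The ~35786 km satellite height and -75 degree
# longitude are hypothetical example values; sweep "x" matches the GOES-R
# fixed-grid convention, while Meteosat-style data typically uses "y".
geos = GeostationarySatelliteConversion(
    sweep_angle_axis="x",
    satellite_height=35786023.0,
    longitude_natural_origin=-75.0,
)
print(geos.method_name)  # Geostationary Satellite (Sweep X)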
latitude_second_parallel: float Latitude of 2nd standard parallel (lat_2). latitude_false_origin: float, default=0.0 Latitude of projection center (lat_0). longitude_false_origin: float, default=0.0 Longitude of projection center (lon_0). easting_false_origin: float, default=0.0 False easting (x_0). northing_false_origin: float, default=0.0 False northing (y_0). """ lcc_json = { "$schema": "https://proj.org/schemas/v0.2/projjson.schema.json", "type": "Conversion", "name": "unknown", "method": { "name": "Lambert Conic Conformal (2SP)", "id": {"authority": "EPSG", "code": 9802}, }, "parameters": [ { "name": "Latitude of 1st standard parallel", "value": latitude_first_parallel, "unit": "degree", "id": {"authority": "EPSG", "code": 8823}, }, { "name": "Latitude of 2nd standard parallel", "value": latitude_second_parallel, "unit": "degree", "id": {"authority": "EPSG", "code": 8824}, }, { "name": "Latitude of false origin", "value": latitude_false_origin, "unit": "degree", "id": {"authority": "EPSG", "code": 8821}, }, { "name": "Longitude of false origin", "value": longitude_false_origin, "unit": "degree", "id": {"authority": "EPSG", "code": 8822}, }, { "name": "Easting at false origin", "value": easting_false_origin, "unit": "metre", "id": {"authority": "EPSG", "code": 8826}, }, { "name": "Northing at false origin", "value": northing_false_origin, "unit": "metre", "id": {"authority": "EPSG", "code": 8827}, }, ], } return cls.from_json_dict(lcc_json) class LambertConformalConic1SPConversion(CoordinateOperation): """ .. versionadded:: 2.5.0 Class for constructing the Lambert Conformal Conic 1SP conversion. :ref:`PROJ docs ` """ def __new__( cls, latitude_natural_origin: float = 0.0, longitude_natural_origin: float = 0.0, false_easting: float = 0.0, false_northing: float = 0.0, scale_factor_natural_origin: float = 1.0, ): """ Parameters ---------- latitude_natural_origin: float, default=0.0 Latitude of projection center (lat_0). longitude_natural_origin: float, default=0.0 Longitude of projection center (lon_0). false_easting: float, default=0.0 False easting (x_0). false_northing: float, default=0.0 False northing (y_0). scale_factor_natural_origin: float, default=1.0 Scale factor at natural origin (k_0). """ lcc_json = { "$schema": "https://proj.org/schemas/v0.2/projjson.schema.json", "type": "Conversion", "name": "unknown", "method": { "name": "Lambert Conic Conformal (1SP)", "id": {"authority": "EPSG", "code": 9801}, }, "parameters": [ { "name": "Latitude of natural origin", "value": latitude_natural_origin, "unit": "degree", "id": {"authority": "EPSG", "code": 8801}, }, { "name": "Longitude of natural origin", "value": longitude_natural_origin, "unit": "degree", "id": {"authority": "EPSG", "code": 8802}, }, { "name": "Scale factor at natural origin", "value": scale_factor_natural_origin, "unit": "unity", "id": {"authority": "EPSG", "code": 8805}, }, { "name": "False easting", "value": false_easting, "unit": "metre", "id": {"authority": "EPSG", "code": 8806}, }, { "name": "False northing", "value": false_northing, "unit": "metre", "id": {"authority": "EPSG", "code": 8807}, }, ], } return cls.from_json_dict(lcc_json) class LambertCylindricalEqualAreaConversion(CoordinateOperation): """ .. versionadded:: 2.5.0 Class for constructing the Lambert Cylindrical Equal Area conversion. 
:ref:`PROJ docs ` """ def __new__( cls, latitude_first_parallel: float = 0.0, longitude_natural_origin: float = 0.0, false_easting: float = 0.0, false_northing: float = 0.0, ): """ Parameters ---------- latitude_first_parallel: float, default=0.0 Latitude of 1st standard parallel (lat_ts). longitude_natural_origin: float, default=0.0 Longitude of projection center (lon_0). false_easting: float, default=0.0 False easting (x_0). false_northing: float, default=0.0 False northing (y_0). """ cea_json = { "$schema": "https://proj.org/schemas/v0.2/projjson.schema.json", "type": "Conversion", "name": "unknown", "method": { "name": "Lambert Cylindrical Equal Area", "id": {"authority": "EPSG", "code": 9835}, }, "parameters": [ { "name": "Latitude of 1st standard parallel", "value": latitude_first_parallel, "unit": "degree", "id": {"authority": "EPSG", "code": 8823}, }, { "name": "Longitude of natural origin", "value": longitude_natural_origin, "unit": "degree", "id": {"authority": "EPSG", "code": 8802}, }, { "name": "False easting", "value": false_easting, "unit": "metre", "id": {"authority": "EPSG", "code": 8806}, }, { "name": "False northing", "value": false_northing, "unit": "metre", "id": {"authority": "EPSG", "code": 8807}, }, ], } return cls.from_json_dict(cea_json) class LambertCylindricalEqualAreaScaleConversion(CoordinateOperation): """ .. versionadded:: 2.5.0 Class for constructing the Lambert Cylindrical Equal Area conversion. This version uses the scale factor and differs from the official version. The scale factor will be converted to the Latitude of 1st standard parallel (lat_ts) when exporting to WKT in PROJ>=7.0.0. Previous version will export it as a PROJ-based coordinate operation in the WKT. :ref:`PROJ docs ` """ def __new__( cls, longitude_natural_origin: float = 0.0, false_easting: float = 0.0, false_northing: float = 0.0, scale_factor_natural_origin: float = 1.0, ): """ Parameters ---------- longitude_natural_origin: float, default=0.0 Longitude of projection center (lon_0). false_easting: float, default=0.0 False easting (x_0). false_northing: float, default=0.0 False northing (y_0). scale_factor_natural_origin: float, default=1.0 Scale factor at natural origin (k or k_0). """ # pylint: disable=import-outside-toplevel from pyproj.crs import CRS # hack due to: https://github.com/OSGeo/PROJ/issues/1881 proj_string = ( "+proj=cea " f"+lon_0={longitude_natural_origin} " f"+x_0={false_easting} " f"+y_0={false_northing} " f"+k_0={scale_factor_natural_origin}" ) return cls.from_json( CRS(proj_string).coordinate_operation.to_json() # type: ignore[union-attr] ) class MercatorAConversion(CoordinateOperation): """ .. versionadded:: 2.5.0 Class for constructing the Mercator (variant A) conversion. :ref:`PROJ docs ` """ def __new__( cls, latitude_natural_origin: float = 0.0, longitude_natural_origin: float = 0.0, false_easting: float = 0.0, false_northing: float = 0.0, scale_factor_natural_origin: float = 1.0, ): """ Parameters ---------- latitude_natural_origin: float, default=0.0 Latitude of natural origin (lat_0). Must be 0 by `this conversion's definition `_. longitude_natural_origin: float, default=0.0 Longitude of natural origin (lon_0). false_easting: float, default=0.0 False easting (x_0). false_northing: float, default=0.0 False northing (y_0). scale_factor_natural_origin: float, default=1.0 Scale factor at natural origin (k or k_0). """ if latitude_natural_origin != 0: raise CRSError( "This conversion is defined for only latitude_natural_origin = 0." 
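# --- Illustrative sketch, not part of the original module: because the scale
# variant above routes through a PROJ string, recent PROJ versions (per the
# class docstring) convert k_0 into an equivalent first standard parallel, so
# the resulting operation reports the EPSG method with a latitude parameter
# rather than a scale factor. Exact output depends on the installed PROJ.
cea_scale = LambertCylindricalEqualAreaScaleConversion(
    scale_factor_natural_origin=1.0
)
print(cea_scale.method_name)
print({param.name: param.value for param in cea_scale.params})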
) merc_json = { "$schema": "https://proj.org/schemas/v0.2/projjson.schema.json", "type": "Conversion", "name": "unknown", "method": { "name": "Mercator (variant A)", "id": {"authority": "EPSG", "code": 9804}, }, "parameters": [ { "name": "Latitude of natural origin", "value": latitude_natural_origin, "unit": "degree", "id": {"authority": "EPSG", "code": 8801}, }, { "name": "Longitude of natural origin", "value": longitude_natural_origin, "unit": "degree", "id": {"authority": "EPSG", "code": 8802}, }, { "name": "Scale factor at natural origin", "value": scale_factor_natural_origin, "unit": "unity", "id": {"authority": "EPSG", "code": 8805}, }, { "name": "False easting", "value": false_easting, "unit": "metre", "id": {"authority": "EPSG", "code": 8806}, }, { "name": "False northing", "value": false_northing, "unit": "metre", "id": {"authority": "EPSG", "code": 8807}, }, ], } return cls.from_json_dict(merc_json) class MercatorBConversion(CoordinateOperation): """ .. versionadded:: 2.5.0 Class for constructing the Mercator (variant B) conversion. :ref:`PROJ docs ` """ def __new__( cls, latitude_first_parallel: float = 0.0, longitude_natural_origin: float = 0.0, false_easting: float = 0.0, false_northing: float = 0.0, ): """ Parameters ---------- latitude_first_parallel: float, default=0.0 Latitude of 1st standard parallel (lat_ts). longitude_natural_origin: float, default=0.0 Longitude of projection center (lon_0). false_easting: float, default=0.0 False easting (x_0). false_northing: float, default=0.0 False northing (y_0). """ merc_json = { "$schema": "https://proj.org/schemas/v0.2/projjson.schema.json", "type": "Conversion", "name": "unknown", "method": { "name": "Mercator (variant B)", "id": {"authority": "EPSG", "code": 9805}, }, "parameters": [ { "name": "Latitude of 1st standard parallel", "value": latitude_first_parallel, "unit": "degree", "id": {"authority": "EPSG", "code": 8823}, }, { "name": "Longitude of natural origin", "value": longitude_natural_origin, "unit": "degree", "id": {"authority": "EPSG", "code": 8802}, }, { "name": "False easting", "value": false_easting, "unit": "metre", "id": {"authority": "EPSG", "code": 8806}, }, { "name": "False northing", "value": false_northing, "unit": "metre", "id": {"authority": "EPSG", "code": 8807}, }, ], } return cls.from_json_dict(merc_json) class HotineObliqueMercatorBConversion(CoordinateOperation): """ .. versionadded:: 2.5.0 .. versionadded:: 3.7.0 azimuth_projection_centre, scale_factor_projection_centre Class for constructing the Hotine Oblique Mercator (variant B) conversion. :ref:`PROJ docs ` """ def __new__( cls, latitude_projection_centre: float, longitude_projection_centre: float, angle_from_rectified_to_skew_grid: float, easting_projection_centre: float = 0.0, northing_projection_centre: float = 0.0, azimuth_projection_centre: Optional[float] = None, scale_factor_projection_centre: Optional[float] = None, azimuth_initial_line: Optional[float] = None, scale_factor_on_initial_line: Optional[float] = None, ): """ Parameters ---------- latitude_projection_centre: float Latitude of projection centre (lat_0). longitude_projection_centre: float Longitude of projection centre (lonc). azimuth_projection_centre: float Azimuth of initial line (alpha). angle_from_rectified_to_skew_grid: float Angle from Rectified to Skew Grid (gamma). scale_factor_projection_centre: float, default=1.0 Scale factor on initial line (k or k_0). easting_projection_centre: float, default=0.0 Easting at projection centre (x_0). 
northing_projection_centre: float, default=0.0 Northing at projection centre (y_0). azimuth_initial_line: float Deprecated alias for azimuth_projection_centre, scale_factor_on_initial_line: float Deprecated alias for scale_factor_projection_centre. """ if scale_factor_on_initial_line is not None: if scale_factor_projection_centre is not None: raise ValueError( "scale_factor_projection_centre and scale_factor_on_initial_line " "cannot be provided together." ) warnings.warn( "scale_factor_on_initial_line is deprecated. " "Use scale_factor_projection_centre instead.", FutureWarning, stacklevel=2, ) scale_factor_projection_centre = scale_factor_on_initial_line elif scale_factor_projection_centre is None: scale_factor_projection_centre = 1.0 if azimuth_projection_centre is None and azimuth_initial_line is None: raise ValueError( "azimuth_projection_centre or azimuth_initial_line must be provided." ) if azimuth_initial_line is not None: if azimuth_projection_centre is not None: raise ValueError( "azimuth_projection_centre and azimuth_initial_line cannot be " "provided together." ) warnings.warn( "azimuth_initial_line is deprecated. " "Use azimuth_projection_centre instead.", FutureWarning, stacklevel=2, ) azimuth_projection_centre = azimuth_initial_line omerc_json = { "$schema": "https://proj.org/schemas/v0.2/projjson.schema.json", "type": "Conversion", "name": "unknown", "method": { "name": "Hotine Oblique Mercator (variant B)", "id": {"authority": "EPSG", "code": 9815}, }, "parameters": [ { "name": "Latitude of projection centre", "value": latitude_projection_centre, "unit": "degree", "id": {"authority": "EPSG", "code": 8811}, }, { "name": "Longitude of projection centre", "value": longitude_projection_centre, "unit": "degree", "id": {"authority": "EPSG", "code": 8812}, }, { "name": ( "Azimuth at projection centre" if PROJ_VERSION >= (9, 5, 0) else "Azimuth of initial line" ), "value": azimuth_projection_centre, "unit": "degree", "id": {"authority": "EPSG", "code": 8813}, }, { "name": "Angle from Rectified to Skew Grid", "value": angle_from_rectified_to_skew_grid, "unit": "degree", "id": {"authority": "EPSG", "code": 8814}, }, { "name": ( "Scale factor at projection centre" if PROJ_VERSION >= (9, 5, 0) else "Scale factor on initial line" ), "value": scale_factor_projection_centre, "unit": "unity", "id": {"authority": "EPSG", "code": 8815}, }, { "name": "Easting at projection centre", "value": easting_projection_centre, "unit": "metre", "id": {"authority": "EPSG", "code": 8816}, }, { "name": "Northing at projection centre", "value": northing_projection_centre, "unit": "metre", "id": {"authority": "EPSG", "code": 8817}, }, ], } return cls.from_json_dict(omerc_json) class OrthographicConversion(CoordinateOperation): """ .. versionadded:: 2.5.0 Class for constructing the Orthographic conversion. :ref:`PROJ docs ` """ def __new__( cls, latitude_natural_origin: float = 0.0, longitude_natural_origin: float = 0.0, false_easting: float = 0.0, false_northing: float = 0.0, ): """ Parameters ---------- latitude_natural_origin: float, default=0.0 Latitude of projection center (lat_0). longitude_natural_origin: float, default=0.0 Longitude of projection center (lon_0). false_easting: float, default=0.0 False easting (x_0). false_northing: float, default=0.0 False northing (y_0). 
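# --- Illustrative sketch, not part of the original module: the current keyword
# names are azimuth_projection_centre / scale_factor_projection_centre; the
# older *_initial_line spellings still work but emit a FutureWarning, as
# handled above. The Borneo-like values are hypothetical.
omerc = HotineObliqueMercatorBConversion(
    latitude_projection_centre=4.0,
    longitude_projection_centre=115.0,
    azimuth_projection_centre=53.3,
    angle_from_rectified_to_skew_grid=53.1,
    scale_factor_projection_centre=0.99984,
)
print(omerc.method_name)  # Hotine Oblique Mercator (variant B)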
""" ortho_json = { "$schema": "https://proj.org/schemas/v0.2/projjson.schema.json", "type": "Conversion", "name": "unknown", "method": { "name": "Orthographic", "id": {"authority": "EPSG", "code": 9840}, }, "parameters": [ { "name": "Latitude of natural origin", "value": latitude_natural_origin, "unit": "degree", "id": {"authority": "EPSG", "code": 8801}, }, { "name": "Longitude of natural origin", "value": longitude_natural_origin, "unit": "degree", "id": {"authority": "EPSG", "code": 8802}, }, { "name": "False easting", "value": false_easting, "unit": "metre", "id": {"authority": "EPSG", "code": 8806}, }, { "name": "False northing", "value": false_northing, "unit": "metre", "id": {"authority": "EPSG", "code": 8807}, }, ], } return cls.from_json_dict(ortho_json) class PolarStereographicAConversion(CoordinateOperation): """ .. versionadded:: 2.5.0 Class for constructing the Polar Stereographic A conversion. :ref:`PROJ docs ` """ def __new__( cls, latitude_natural_origin: float, longitude_natural_origin: float = 0.0, false_easting: float = 0.0, false_northing: float = 0.0, scale_factor_natural_origin: float = 1.0, ): """ Parameters ---------- latitude_natural_origin: float Latitude of natural origin (lat_0). Either +90 or -90. longitude_natural_origin: float, default=0.0 Longitude of natural origin (lon_0). false_easting: float, default=0.0 False easting (x_0). false_northing: float, default=0.0 False northing (y_0). scale_factor_natural_origin: float, default=0.0 Scale factor at natural origin (k or k_0). """ stere_json = { "$schema": "https://proj.org/schemas/v0.2/projjson.schema.json", "type": "Conversion", "name": "unknown", "method": { "name": "Polar Stereographic (variant A)", "id": {"authority": "EPSG", "code": 9810}, }, "parameters": [ { "name": "Latitude of natural origin", "value": latitude_natural_origin, "unit": "degree", "id": {"authority": "EPSG", "code": 8801}, }, { "name": "Longitude of natural origin", "value": longitude_natural_origin, "unit": "degree", "id": {"authority": "EPSG", "code": 8802}, }, { "name": "Scale factor at natural origin", "value": scale_factor_natural_origin, "unit": "unity", "id": {"authority": "EPSG", "code": 8805}, }, { "name": "False easting", "value": false_easting, "unit": "metre", "id": {"authority": "EPSG", "code": 8806}, }, { "name": "False northing", "value": false_northing, "unit": "metre", "id": {"authority": "EPSG", "code": 8807}, }, ], } return cls.from_json_dict(stere_json) class PolarStereographicBConversion(CoordinateOperation): """ .. versionadded:: 2.5.0 Class for constructing the Polar Stereographic B conversion. :ref:`PROJ docs ` """ def __new__( cls, latitude_standard_parallel: float = 0.0, longitude_origin: float = 0.0, false_easting: float = 0.0, false_northing: float = 0.0, ): """ Parameters ---------- latitude_standard_parallel: float, default=0.0 Latitude of standard parallel (lat_ts). longitude_origin: float, default=0.0 Longitude of origin (lon_0). false_easting: float, default=0.0 False easting (x_0). false_northing: float, default=0.0 False northing (y_0). 
""" stere_json = { "$schema": "https://proj.org/schemas/v0.2/projjson.schema.json", "type": "Conversion", "name": "unknown", "method": { "name": "Polar Stereographic (variant B)", "id": {"authority": "EPSG", "code": 9829}, }, "parameters": [ { "name": "Latitude of standard parallel", "value": latitude_standard_parallel, "unit": "degree", "id": {"authority": "EPSG", "code": 8832}, }, { "name": "Longitude of origin", "value": longitude_origin, "unit": "degree", "id": {"authority": "EPSG", "code": 8833}, }, { "name": "False easting", "value": false_easting, "unit": "metre", "id": {"authority": "EPSG", "code": 8806}, }, { "name": "False northing", "value": false_northing, "unit": "metre", "id": {"authority": "EPSG", "code": 8807}, }, ], } return cls.from_json_dict(stere_json) class SinusoidalConversion(CoordinateOperation): """ .. versionadded:: 2.5.0 Class for constructing the Sinusoidal conversion. :ref:`PROJ docs ` """ def __new__( cls, longitude_natural_origin: float = 0.0, false_easting: float = 0.0, false_northing: float = 0.0, ): """ Parameters ---------- longitude_natural_origin: float, default=0.0 Longitude of projection center (lon_0). false_easting: float, default=0.0 False easting (x_0). false_northing: float, default=0.0 False northing (y_0). """ sinu_json = { "$schema": "https://proj.org/schemas/v0.2/projjson.schema.json", "type": "Conversion", "name": "unknown", "method": {"name": "Sinusoidal"}, "parameters": [ { "name": "Longitude of natural origin", "value": longitude_natural_origin, "unit": "degree", "id": {"authority": "EPSG", "code": 8802}, }, { "name": "False easting", "value": false_easting, "unit": "metre", "id": {"authority": "EPSG", "code": 8806}, }, { "name": "False northing", "value": false_northing, "unit": "metre", "id": {"authority": "EPSG", "code": 8807}, }, ], } return cls.from_json_dict(sinu_json) class StereographicConversion(CoordinateOperation): """ .. versionadded:: 2.5.0 Class for constructing the Stereographic conversion. :ref:`PROJ docs ` """ def __new__( cls, latitude_natural_origin: float = 0.0, longitude_natural_origin: float = 0.0, false_easting: float = 0.0, false_northing: float = 0.0, scale_factor_natural_origin: float = 1.0, ): """ Parameters ---------- latitude_natural_origin: float, default=0.0 Latitude of natural origin (lat_0). longitude_natural_origin: float, default=0.0 Longitude of natural origin (lon_0). false_easting: float, default=0.0 False easting (x_0). false_northing: float, default=0.0 False northing (y_0). scale_factor_natural_origin: float, default=1.0 Scale factor at natural origin (k or k_0). """ stere_json = { "$schema": "https://proj.org/schemas/v0.2/projjson.schema.json", "type": "Conversion", "name": "unknown", "method": {"name": "Stereographic"}, "parameters": [ { "name": "Latitude of natural origin", "value": latitude_natural_origin, "unit": "degree", "id": {"authority": "EPSG", "code": 8801}, }, { "name": "Longitude of natural origin", "value": longitude_natural_origin, "unit": "degree", "id": {"authority": "EPSG", "code": 8802}, }, { "name": "Scale factor at natural origin", "value": scale_factor_natural_origin, "unit": "unity", "id": {"authority": "EPSG", "code": 8805}, }, { "name": "False easting", "value": false_easting, "unit": "metre", "id": {"authority": "EPSG", "code": 8806}, }, { "name": "False northing", "value": false_northing, "unit": "metre", "id": {"authority": "EPSG", "code": 8807}, }, ], } return cls.from_json_dict(stere_json) class UTMConversion(CoordinateOperation): """ .. 
versionadded:: 2.5.0 Class for constructing the UTM conversion. :ref:`PROJ docs ` """ def __new__(cls, zone: str, hemisphere: str = "N"): """ Parameters ---------- zone: int UTM Zone between 1-60. hemisphere: str, default="N" Either N for North or S for South. """ return cls.from_name(f"UTM zone {zone}{hemisphere}") class TransverseMercatorConversion(CoordinateOperation): """ .. versionadded:: 2.5.0 Class for constructing the Transverse Mercator conversion. :ref:`PROJ docs ` """ def __new__( cls, latitude_natural_origin: float = 0.0, longitude_natural_origin: float = 0.0, false_easting: float = 0.0, false_northing: float = 0.0, scale_factor_natural_origin: float = 1.0, ): """ Parameters ---------- latitude_natural_origin: float, default=0.0 Latitude of projection center (lat_0). longitude_natural_origin: float, default=0.0 Longitude of projection center (lon_0). false_easting: float, default=0.0 False easting (x_0). false_northing: float, default=0.0 False northing (y_0). scale_factor_natural_origin: float, default=1.0 Scale factor at natural origin (k or k_0). """ tmerc_json = { "$schema": "https://proj.org/schemas/v0.2/projjson.schema.json", "type": "Conversion", "name": "unknown", "method": { "name": "Transverse Mercator", "id": {"authority": "EPSG", "code": 9807}, }, "parameters": [ { "name": "Latitude of natural origin", "value": latitude_natural_origin, "unit": "degree", "id": {"authority": "EPSG", "code": 8801}, }, { "name": "Longitude of natural origin", "value": longitude_natural_origin, "unit": "degree", "id": {"authority": "EPSG", "code": 8802}, }, { "name": "Scale factor at natural origin", "value": scale_factor_natural_origin, "unit": "unity", "id": {"authority": "EPSG", "code": 8805}, }, { "name": "False easting", "value": false_easting, "unit": "metre", "id": {"authority": "EPSG", "code": 8806}, }, { "name": "False northing", "value": false_northing, "unit": "metre", "id": {"authority": "EPSG", "code": 8807}, }, ], } return cls.from_json_dict(tmerc_json) class VerticalPerspectiveConversion(CoordinateOperation): """ .. versionadded:: 2.5.0 Class for constructing the Vertical Perspective conversion. :ref:`PROJ docs ` """ def __new__( cls, viewpoint_height: float, latitude_topocentric_origin: float = 0.0, longitude_topocentric_origin: float = 0.0, ellipsoidal_height_topocentric_origin: float = 0.0, false_easting: float = 0.0, false_northing: float = 0.0, ): """ Parameters ---------- viewpoint_height: float Viewpoint height (h). latitude_topocentric_origin: float, default=0.0 Latitude of topocentric origin (lat_0). longitude_topocentric_origin: float, default=0.0 Longitude of topocentric origin (lon_0). ellipsoidal_height_topocentric_origin: float, default=0.0 Ellipsoidal height of topocentric origin. false_easting: float, default=0.0 False easting (x_0). false_northing: float, default=0.0 False northing (y_0). 
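# --- Illustrative sketch, not part of the original module: UTMConversion is
# resolved by name, so the zone/hemisphere pair must correspond to an existing
# "UTM zone NNH" operation.
utm = UTMConversion(33, "N")
print(utm.name)  # UTM zone 33N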
""" nsper_json = { "$schema": "https://proj.org/schemas/v0.2/projjson.schema.json", "type": "Conversion", "name": "unknown", "method": { "name": "Vertical Perspective", "id": {"authority": "EPSG", "code": 9838}, }, "parameters": [ { "name": "Latitude of topocentric origin", "value": latitude_topocentric_origin, "unit": "degree", "id": {"authority": "EPSG", "code": 8834}, }, { "name": "Longitude of topocentric origin", "value": longitude_topocentric_origin, "unit": "degree", "id": {"authority": "EPSG", "code": 8835}, }, { "name": "Ellipsoidal height of topocentric origin", "value": ellipsoidal_height_topocentric_origin, "unit": "metre", "id": {"authority": "EPSG", "code": 8836}, }, { "name": "Viewpoint height", "value": viewpoint_height, "unit": "metre", "id": {"authority": "EPSG", "code": 8840}, }, { "name": "False easting", "value": false_easting, "unit": "metre", "id": {"authority": "EPSG", "code": 8806}, }, { "name": "False northing", "value": false_northing, "unit": "metre", "id": {"authority": "EPSG", "code": 8807}, }, ], } return cls.from_json_dict(nsper_json) class RotatedLatitudeLongitudeConversion(CoordinateOperation): """ .. versionadded:: 2.5.0 Class for constructing the Rotated Latitude Longitude conversion. :ref:`PROJ docs ` """ def __new__(cls, o_lat_p: float, o_lon_p: float, lon_0: float = 0.0): """ Parameters ---------- o_lat_p: float Latitude of the North pole of the unrotated source CRS, expressed in the rotated geographic CRS. o_lon_p: float Longitude of the North pole of the unrotated source CRS, expressed in the rotated geographic CRS. lon_0: float, default=0.0 Longitude of projection center. """ rot_latlon_json = { "$schema": "https://proj.org/schemas/v0.2/projjson.schema.json", "type": "Conversion", "name": "unknown", "method": {"name": "PROJ ob_tran o_proj=longlat"}, "parameters": [ {"name": "o_lat_p", "value": o_lat_p, "unit": "degree"}, {"name": "o_lon_p", "value": o_lon_p, "unit": "degree"}, {"name": "lon_0", "value": lon_0, "unit": "degree"}, ], } return cls.from_json_dict(rot_latlon_json) class PoleRotationNetCDFCFConversion(CoordinateOperation): """ .. versionadded:: 3.3.0 Class for constructing the Pole rotation (netCDF CF convention) conversion. http://cfconventions.org/cf-conventions/cf-conventions.html#_rotated_pole :ref:`PROJ docs ` """ def __new__( cls, grid_north_pole_latitude: float, grid_north_pole_longitude: float, north_pole_grid_longitude: float = 0.0, ): """ Parameters ---------- grid_north_pole_latitude: float Latitude of the North pole of the unrotated source CRS, expressed in the rotated geographic CRS (o_lat_p) grid_north_pole_longitude: float Longitude of projection center (lon_0 - 180). north_pole_grid_longitude: float, default=0.0 Longitude of the North pole of the unrotated source CRS, expressed in the rotated geographic CRS (o_lon_p). """ rot_latlon_json = { "$schema": "https://proj.org/schemas/v0.4/projjson.schema.json", "type": "Conversion", "name": "Pole rotation (netCDF CF convention)", "method": {"name": "Pole rotation (netCDF CF convention)"}, "parameters": [ { "name": "Grid north pole latitude (netCDF CF convention)", "value": grid_north_pole_latitude, "unit": "degree", }, { "name": "Grid north pole longitude (netCDF CF convention)", "value": grid_north_pole_longitude, "unit": "degree", }, { "name": "North pole grid longitude (netCDF CF convention)", "value": north_pole_grid_longitude, "unit": "degree", }, ], } return cls.from_json_dict(rot_latlon_json) class EquidistantCylindricalConversion(CoordinateOperation): """ .. 
versionadded:: 2.5.0 Class for constructing the Equidistant Cylintrical (Plate Carrée) conversion. :ref:`PROJ docs ` """ def __new__( cls, latitude_first_parallel: float = 0.0, latitude_natural_origin: float = 0.0, longitude_natural_origin: float = 0.0, false_easting: float = 0.0, false_northing: float = 0.0, ): """ Parameters ---------- latitude_first_parallel: float, default=0.0 Latitude of 1st standard parallel (lat_ts). latitude_natural_origin: float, default=0.0 Longitude of projection center (lon_0). longitude_natural_origin: float, default=0.0 Longitude of projection center (lon_0). false_easting: float, default=0.0 False easting (x_0). false_northing: float, default=0.0 False northing (y_0). """ eqc_json = { "$schema": "https://proj.org/schemas/v0.2/projjson.schema.json", "type": "Conversion", "name": "unknown", "method": { "name": "Equidistant Cylindrical", "id": {"authority": "EPSG", "code": 1028}, }, "parameters": [ { "name": "Latitude of 1st standard parallel", "value": latitude_first_parallel, "unit": "degree", "id": {"authority": "EPSG", "code": 8823}, }, { "name": "Latitude of natural origin", "value": latitude_natural_origin, "unit": "degree", "id": {"authority": "EPSG", "code": 8801}, }, { "name": "Longitude of natural origin", "value": longitude_natural_origin, "unit": "degree", "id": {"authority": "EPSG", "code": 8802}, }, { "name": "False easting", "value": false_easting, "unit": "metre", "id": {"authority": "EPSG", "code": 8806}, }, { "name": "False northing", "value": false_northing, "unit": "metre", "id": {"authority": "EPSG", "code": 8807}, }, ], } return cls.from_json_dict(eqc_json) # Add an alias for PlateCarree PlateCarreeConversion = EquidistantCylindricalConversion class ToWGS84Transformation(CoordinateOperation): """ .. versionadded:: 2.5.0 Class for constructing the ToWGS84 Transformation. """ def __new__( cls, source_crs: Any, x_axis_translation: float = 0, y_axis_translation: float = 0, z_axis_translation: float = 0, x_axis_rotation: float = 0, y_axis_rotation: float = 0, z_axis_rotation: float = 0, scale_difference: float = 0, ): """ Parameters ---------- source_crs: Any Input to create the Source CRS. x_axis_translation: float, default=0.0 X-axis translation. y_axis_translation: float, default=0.0 Y-axis translation. z_axis_translation: float, default=0.0 Z-axis translation. x_axis_rotation: float, default=0.0 X-axis rotation. y_axis_rotation: float, default=0.0 Y-axis rotation. z_axis_rotation: float, default=0.0 Z-axis rotation. scale_difference: float, default=0.0 Scale difference. 
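# --- Illustrative sketch, not part of the original module: PlateCarreeConversion
# is simply the alias defined above, so constructing it with all defaults gives
# the plain equidistant cylindrical (plate carree) conversion.
eqc = PlateCarreeConversion()
print(eqc.method_name)  # Equidistant Cylindrical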
""" # pylint: disable=import-outside-toplevel from pyproj.crs import CRS towgs84_json = { "$schema": "https://proj.org/schemas/v0.2/projjson.schema.json", "type": "Transformation", "name": "Transformation from unknown to WGS84", "source_crs": CRS.from_user_input(source_crs).to_json_dict(), "target_crs": { "type": "GeographicCRS", "name": "WGS 84", "datum": { "type": "GeodeticReferenceFrame", "name": "World Geodetic System 1984", "ellipsoid": { "name": "WGS 84", "semi_major_axis": 6378137, "inverse_flattening": 298.257223563, }, }, "coordinate_system": { "subtype": "ellipsoidal", "axis": [ { "name": "Latitude", "abbreviation": "lat", "direction": "north", "unit": "degree", }, { "name": "Longitude", "abbreviation": "lon", "direction": "east", "unit": "degree", }, ], }, "id": {"authority": "EPSG", "code": 4326}, }, "method": { "name": "Position Vector transformation (geog2D domain)", "id": {"authority": "EPSG", "code": 9606}, }, "parameters": [ { "name": "X-axis translation", "value": x_axis_translation, "unit": "metre", "id": {"authority": "EPSG", "code": 8605}, }, { "name": "Y-axis translation", "value": y_axis_translation, "unit": "metre", "id": {"authority": "EPSG", "code": 8606}, }, { "name": "Z-axis translation", "value": z_axis_translation, "unit": "metre", "id": {"authority": "EPSG", "code": 8607}, }, { "name": "X-axis rotation", "value": x_axis_rotation, "unit": { "type": "AngularUnit", "name": "arc-second", "conversion_factor": 4.84813681109536e-06, }, "id": {"authority": "EPSG", "code": 8608}, }, { "name": "Y-axis rotation", "value": y_axis_rotation, "unit": { "type": "AngularUnit", "name": "arc-second", "conversion_factor": 4.84813681109536e-06, }, "id": {"authority": "EPSG", "code": 8609}, }, { "name": "Z-axis rotation", "value": z_axis_rotation, "unit": { "type": "AngularUnit", "name": "arc-second", "conversion_factor": 4.84813681109536e-06, }, "id": {"authority": "EPSG", "code": 8610}, }, { "name": "Scale difference", "value": scale_difference, "unit": { "type": "ScaleUnit", "name": "parts per million", "conversion_factor": 1e-06, }, "id": {"authority": "EPSG", "code": 8611}, }, ], } return cls.from_json_dict(towgs84_json) pyproj-3.7.1/pyproj/crs/coordinate_system.py000066400000000000000000000234451475425760300213110ustar00rootroot00000000000000""" This module is for building coordinate systems to be used when building a CRS. """ from pyproj._crs import CoordinateSystem from pyproj.crs.enums import ( Cartesian2DCSAxis, Ellipsoidal2DCSAxis, Ellipsoidal3DCSAxis, VerticalCSAxis, ) # useful constants to use when setting PROJ JSON units UNIT_METRE = "metre" UNIT_DEGREE = "degree" UNIT_FT = {"type": "LinearUnit", "name": "foot", "conversion_factor": 0.3048} UNIT_US_FT = { "type": "LinearUnit", "name": "US survey foot", "conversion_factor": 0.304800609601219, } _ELLIPSOIDAL_2D_AXIS_MAP = { Ellipsoidal2DCSAxis.LONGITUDE_LATITUDE: [ { "name": "Longitude", "abbreviation": "lon", "direction": "east", "unit": UNIT_DEGREE, }, { "name": "Latitude", "abbreviation": "lat", "direction": "north", "unit": UNIT_DEGREE, }, ], Ellipsoidal2DCSAxis.LATITUDE_LONGITUDE: [ { "name": "Latitude", "abbreviation": "lat", "direction": "north", "unit": UNIT_DEGREE, }, { "name": "Longitude", "abbreviation": "lon", "direction": "east", "unit": UNIT_DEGREE, }, ], } class Ellipsoidal2DCS(CoordinateSystem): """ .. 
versionadded:: 2.5.0 This generates an Ellipsoidal 2D Coordinate System """ def __new__( cls, axis: Ellipsoidal2DCSAxis | str = Ellipsoidal2DCSAxis.LONGITUDE_LATITUDE, ): """ Parameters ---------- axis: :class:`pyproj.crs.enums.Ellipsoidal2DCSAxis` or str, optional This is the axis order of the coordinate system. Default is :attr:`pyproj.crs.enums.Ellipsoidal2DCSAxis.LONGITUDE_LATITUDE`. """ return cls.from_json_dict( { "type": "CoordinateSystem", "subtype": "ellipsoidal", "axis": _ELLIPSOIDAL_2D_AXIS_MAP[Ellipsoidal2DCSAxis.create(axis)], } ) _ELLIPSOIDAL_3D_AXIS_MAP = { Ellipsoidal3DCSAxis.LONGITUDE_LATITUDE_HEIGHT: [ { "name": "Longitude", "abbreviation": "lon", "direction": "east", "unit": UNIT_DEGREE, }, { "name": "Latitude", "abbreviation": "lat", "direction": "north", "unit": UNIT_DEGREE, }, { "name": "Ellipsoidal height", "abbreviation": "h", "direction": "up", "unit": UNIT_METRE, }, ], Ellipsoidal3DCSAxis.LATITUDE_LONGITUDE_HEIGHT: [ { "name": "Latitude", "abbreviation": "lat", "direction": "north", "unit": UNIT_DEGREE, }, { "name": "Longitude", "abbreviation": "lon", "direction": "east", "unit": UNIT_DEGREE, }, { "name": "Ellipsoidal height", "abbreviation": "h", "direction": "up", "unit": UNIT_METRE, }, ], } class Ellipsoidal3DCS(CoordinateSystem): """ .. versionadded:: 2.5.0 This generates an Ellipsoidal 3D Coordinate System """ def __new__( cls, axis: Ellipsoidal3DCSAxis | str = Ellipsoidal3DCSAxis.LONGITUDE_LATITUDE_HEIGHT, ): """ Parameters ---------- axis: :class:`pyproj.crs.enums.Ellipsoidal3DCSAxis` or str, optional This is the axis order of the coordinate system. Default is :attr:`pyproj.crs.enums.Ellipsoidal3DCSAxis.LONGITUDE_LATITUDE_HEIGHT`. """ return cls.from_json_dict( { "type": "CoordinateSystem", "subtype": "ellipsoidal", "axis": _ELLIPSOIDAL_3D_AXIS_MAP[Ellipsoidal3DCSAxis.create(axis)], } ) _CARTESIAN_2D_AXIS_MAP = { Cartesian2DCSAxis.EASTING_NORTHING: [ { "name": "Easting", "abbreviation": "E", "direction": "east", "unit": UNIT_METRE, }, { "name": "Northing", "abbreviation": "N", "direction": "north", "unit": UNIT_METRE, }, ], Cartesian2DCSAxis.NORTHING_EASTING: [ { "name": "Northing", "abbreviation": "N", "direction": "north", "unit": UNIT_METRE, }, { "name": "Easting", "abbreviation": "E", "direction": "east", "unit": UNIT_METRE, }, ], Cartesian2DCSAxis.EASTING_NORTHING_FT: [ {"name": "Easting", "abbreviation": "X", "direction": "east", "unit": UNIT_FT}, { "name": "Northing", "abbreviation": "Y", "direction": "north", "unit": UNIT_FT, }, ], Cartesian2DCSAxis.NORTHING_EASTING_FT: [ { "name": "Northing", "abbreviation": "Y", "direction": "north", "unit": UNIT_FT, }, {"name": "Easting", "abbreviation": "X", "direction": "east", "unit": UNIT_FT}, ], Cartesian2DCSAxis.EASTING_NORTHING_US_FT: [ { "name": "Easting", "abbreviation": "X", "direction": "east", "unit": UNIT_US_FT, }, { "name": "Northing", "abbreviation": "Y", "direction": "north", "unit": UNIT_US_FT, }, ], Cartesian2DCSAxis.NORTHING_EASTING_US_FT: [ { "name": "Northing", "abbreviation": "Y", "direction": "north", "unit": UNIT_US_FT, }, { "name": "Easting", "abbreviation": "X", "direction": "east", "unit": UNIT_US_FT, }, ], Cartesian2DCSAxis.NORTH_POLE_EASTING_SOUTH_NORTHING_SOUTH: [ { "name": "Easting", "abbreviation": "E", "direction": "south", "unit": UNIT_METRE, }, { "name": "Northing", "abbreviation": "N", "direction": "south", "unit": UNIT_METRE, }, ], Cartesian2DCSAxis.SOUTH_POLE_EASTING_NORTH_NORTHING_NORTH: [ { "name": "Easting", "abbreviation": "E", "direction": "north", "unit": UNIT_METRE, }, 
{ "name": "Northing", "abbreviation": "N", "direction": "north", "unit": UNIT_METRE, }, ], Cartesian2DCSAxis.WESTING_SOUTHING: [ { "name": "Easting", "abbreviation": "Y", "direction": "west", "unit": UNIT_METRE, }, { "name": "Northing", "abbreviation": "X", "direction": "south", "unit": UNIT_METRE, }, ], } class Cartesian2DCS(CoordinateSystem): """ .. versionadded:: 2.5.0 This generates an Cartesian 2D Coordinate System """ def __new__( cls, axis: Cartesian2DCSAxis | str = Cartesian2DCSAxis.EASTING_NORTHING ): """ Parameters ---------- axis: :class:`pyproj.crs.enums.Cartesian2DCSAxis` or str, optional This is the axis order of the coordinate system. Default is :attr:`pyproj.crs.enums.Cartesian2DCSAxis.EASTING_NORTHING`. """ return cls.from_json_dict( { "type": "CoordinateSystem", "subtype": "Cartesian", "axis": _CARTESIAN_2D_AXIS_MAP[Cartesian2DCSAxis.create(axis)], } ) _VERTICAL_AXIS_MAP = { VerticalCSAxis.GRAVITY_HEIGHT: { "name": "Gravity-related height", "abbreviation": "H", "direction": "up", "unit": UNIT_METRE, }, VerticalCSAxis.GRAVITY_HEIGHT_US_FT: { "name": "Gravity-related height", "abbreviation": "H", "direction": "up", "unit": UNIT_US_FT, }, VerticalCSAxis.GRAVITY_HEIGHT_FT: { "name": "Gravity-related height", "abbreviation": "H", "direction": "up", "unit": UNIT_FT, }, VerticalCSAxis.DEPTH: { "name": "Depth", "abbreviation": "D", "direction": "down", "unit": UNIT_METRE, }, VerticalCSAxis.DEPTH_US_FT: { "name": "Depth", "abbreviation": "D", "direction": "down", "unit": UNIT_US_FT, }, VerticalCSAxis.DEPTH_FT: { "name": "Depth", "abbreviation": "D", "direction": "down", "unit": UNIT_FT, }, VerticalCSAxis.UP: { "name": "up", "abbreviation": "H", "direction": "up", "unit": UNIT_METRE, }, VerticalCSAxis.UP_FT: { "name": "up", "abbreviation": "H", "direction": "up", "unit": UNIT_FT, }, VerticalCSAxis.UP_US_FT: { "name": "up", "abbreviation": "H", "direction": "up", "unit": UNIT_US_FT, }, } class VerticalCS(CoordinateSystem): """ .. versionadded:: 2.5.0 This generates an Vertical Coordinate System """ def __new__(cls, axis: VerticalCSAxis | str = VerticalCSAxis.GRAVITY_HEIGHT): """ Parameters ---------- axis: :class:`pyproj.crs.enums.VerticalCSAxis` or str, optional This is the axis direction of the coordinate system. Default is :attr:`pyproj.crs.enums.VerticalCSAxis.GRAVITY_HEIGHT`. """ return cls.from_json_dict( { "type": "CoordinateSystem", "subtype": "vertical", "axis": [_VERTICAL_AXIS_MAP[VerticalCSAxis.create(axis)]], } ) pyproj-3.7.1/pyproj/crs/crs.py000066400000000000000000001757501475425760300163540ustar00rootroot00000000000000""" This module interfaces with PROJ to produce a pythonic interface to the coordinate reference system (CRS) information. 
""" # pylint: disable=too-many-lines import json import re import threading import warnings from collections.abc import Callable from typing import Any, Optional from pyproj._crs import ( _CRS, AreaOfUse, AuthorityMatchInfo, Axis, CoordinateOperation, CoordinateSystem, Datum, Ellipsoid, PrimeMeridian, _load_proj_json, is_proj, is_wkt, ) from pyproj.crs._cf1x8 import ( _GEOGRAPHIC_GRID_MAPPING_NAME_MAP, _GRID_MAPPING_NAME_MAP, _INVERSE_GEOGRAPHIC_GRID_MAPPING_NAME_MAP, _INVERSE_GRID_MAPPING_NAME_MAP, _horizontal_datum_from_params, _try_list_if_string, ) from pyproj.crs.coordinate_operation import ToWGS84Transformation from pyproj.crs.coordinate_system import Cartesian2DCS, Ellipsoidal2DCS, VerticalCS from pyproj.enums import ProjVersion, WktVersion from pyproj.exceptions import CRSError from pyproj.geod import Geod _RE_PROJ_PARAM = re.compile( r""" \+ # parameter starts with '+' character (?P\w+) # capture parameter name \=? # match both key only and key-value parameters (?P\S+)? # capture all characters up to next space (None if no value) \s*? # consume remaining whitespace, if any """, re.X, ) class CRSLocal(threading.local): """ Threading local instance for cython CRS class. For more details, see: https://github.com/pyproj4/pyproj/issues/782 """ def __init__(self): self.crs = None # Initialises in each thread super().__init__() def _prepare_from_dict(projparams: dict, allow_json: bool = True) -> str: if not isinstance(projparams, dict): raise CRSError("CRS input is not a dict") # check if it is a PROJ JSON dict if "proj" not in projparams and "init" not in projparams and allow_json: return json.dumps(projparams) # convert a dict to a proj string. pjargs = [] for key, value in projparams.items(): # the towgs84 as list if isinstance(value, (list, tuple)): value = ",".join([str(val) for val in value]) # issue 183 (+ no_rot) if value is None or str(value) == "True": pjargs.append(f"+{key}") elif str(value) == "False": pass else: pjargs.append(f"+{key}={value}") return _prepare_from_string(" ".join(pjargs)) def _prepare_from_proj_string(in_crs_string: str) -> str: in_crs_string = re.sub(r"[\s+]?=[\s+]?", "=", in_crs_string.lstrip()) # make sure the projection starts with +proj or +init starting_params = ("+init", "+proj", "init", "proj") if not in_crs_string.startswith(starting_params): kvpairs: list[str] = [] first_item_inserted = False for kvpair in in_crs_string.split(): if not first_item_inserted and (kvpair.startswith(starting_params)): kvpairs.insert(0, kvpair) first_item_inserted = True else: kvpairs.append(kvpair) in_crs_string = " ".join(kvpairs) # make sure it is the CRS type if "type=crs" not in in_crs_string: if "+" in in_crs_string: in_crs_string += " +type=crs" else: in_crs_string += " type=crs" # look for EPSG, replace with epsg (EPSG only works # on case-insensitive filesystems). in_crs_string = in_crs_string.replace("+init=EPSG", "+init=epsg").strip() if in_crs_string.startswith(("+init", "init")): warnings.warn( "'+init=:' syntax is deprecated. " "':' is the preferred initialization method. 
" "When making the change, be mindful of axis order changes: " "https://pyproj4.github.io/pyproj/stable/gotchas.html" "#axis-order-changes-in-proj-6", FutureWarning, stacklevel=2, ) return in_crs_string def _prepare_from_string(in_crs_string: str) -> str: if not isinstance(in_crs_string, str): raise CRSError("CRS input is not a string") if not in_crs_string: raise CRSError(f"CRS string is empty or invalid: {in_crs_string!r}") if "{" in in_crs_string: # may be json, try to decode it try: crs_dict = json.loads(in_crs_string, strict=False) except ValueError as err: raise CRSError("CRS appears to be JSON but is not valid") from err if not crs_dict: raise CRSError("CRS is empty JSON") in_crs_string = _prepare_from_dict(crs_dict) elif is_proj(in_crs_string): in_crs_string = _prepare_from_proj_string(in_crs_string) return in_crs_string def _prepare_from_authority(auth_name: str, auth_code: str | int): return f"{auth_name}:{auth_code}" def _prepare_from_epsg(auth_code: str | int): return _prepare_from_authority("EPSG", auth_code) def _is_epsg_code(auth_code: Any) -> bool: if isinstance(auth_code, int): return True if isinstance(auth_code, str) and auth_code.isnumeric(): return True if hasattr(auth_code, "shape") and auth_code.shape == (): return True return False class CRS: """ A pythonic Coordinate Reference System manager. .. versionadded:: 2.0.0 See: :c:func:`proj_create` The functionality is based on other fantastic projects: * `rasterio `_ # noqa: E501 * `opendatacube `_ # noqa: E501 Attributes ---------- srs: str The string form of the user input used to create the CRS. """ def __init__(self, projparams: Any | None = None, **kwargs) -> None: """ Initialize a CRS class instance with: - PROJ string - Dictionary of PROJ parameters - PROJ keyword arguments for parameters - JSON string with PROJ parameters - CRS WKT string - An authority string [i.e. 'epsg:4326'] - An EPSG integer code [i.e. 4326] - A tuple of ("auth_name": "auth_code") [i.e ('epsg', '4326')] - An object with a `to_wkt` method. 
- A :class:`pyproj.crs.CRS` class Example usage: >>> from pyproj import CRS >>> crs_utm = CRS.from_user_input(26915) >>> crs_utm Name: NAD83 / UTM zone 15N Axis Info [cartesian]: - E[east]: Easting (metre) - N[north]: Northing (metre) Area of Use: - name: North America - 96°W to 90°W and NAD83 by country - bounds: (-96.0, 25.61, -90.0, 84.0) Coordinate Operation: - name: UTM zone 15N - method: Transverse Mercator Datum: North American Datum 1983 - Ellipsoid: GRS 1980 - Prime Meridian: Greenwich >>> crs_utm.area_of_use.bounds (-96.0, 25.61, -90.0, 84.0) >>> crs_utm.ellipsoid ELLIPSOID["GRS 1980",6378137,298.257222101, LENGTHUNIT["metre",1], ID["EPSG",7019]] >>> crs_utm.ellipsoid.inverse_flattening 298.257222101 >>> crs_utm.ellipsoid.semi_major_metre 6378137.0 >>> crs_utm.ellipsoid.semi_minor_metre 6356752.314140356 >>> crs_utm.prime_meridian PRIMEM["Greenwich",0, ANGLEUNIT["degree",0.0174532925199433], ID["EPSG",8901]] >>> crs_utm.prime_meridian.unit_name 'degree' >>> crs_utm.prime_meridian.unit_conversion_factor 0.017453292519943295 >>> crs_utm.prime_meridian.longitude 0.0 >>> crs_utm.datum DATUM["North American Datum 1983", ELLIPSOID["GRS 1980",6378137,298.257222101, LENGTHUNIT["metre",1]], ID["EPSG",6269]] >>> crs_utm.coordinate_system CS[Cartesian,2], AXIS["(E)",east, ORDER[1], LENGTHUNIT["metre",1, ID["EPSG",9001]]], AXIS["(N)",north, ORDER[2], LENGTHUNIT["metre",1, ID["EPSG",9001]]] >>> print(crs_utm.coordinate_operation.to_wkt(pretty=True)) CONVERSION["UTM zone 15N", METHOD["Transverse Mercator", ID["EPSG",9807]], PARAMETER["Latitude of natural origin",0, ANGLEUNIT["degree",0.0174532925199433], ID["EPSG",8801]], PARAMETER["Longitude of natural origin",-93, ANGLEUNIT["degree",0.0174532925199433], ID["EPSG",8802]], PARAMETER["Scale factor at natural origin",0.9996, SCALEUNIT["unity",1], ID["EPSG",8805]], PARAMETER["False easting",500000, LENGTHUNIT["metre",1], ID["EPSG",8806]], PARAMETER["False northing",0, LENGTHUNIT["metre",1], ID["EPSG",8807]], ID["EPSG",16015]] >>> crs = CRS(proj='utm', zone=10, ellps='WGS84') >>> print(crs.to_wkt(pretty=True)) PROJCRS["unknown", BASEGEOGCRS["unknown", DATUM["Unknown based on WGS84 ellipsoid", ELLIPSOID["WGS 84",6378137,298.257223563, LENGTHUNIT["metre",1], ID["EPSG",7030]]], PRIMEM["Greenwich",0, ANGLEUNIT["degree",0.0174532925199433], ID["EPSG",8901]]], CONVERSION["UTM zone 10N", METHOD["Transverse Mercator", ID["EPSG",9807]], PARAMETER["Latitude of natural origin",0, ANGLEUNIT["degree",0.0174532925199433], ID["EPSG",8801]], PARAMETER["Longitude of natural origin",-123, ANGLEUNIT["degree",0.0174532925199433], ID["EPSG",8802]], PARAMETER["Scale factor at natural origin",0.9996, SCALEUNIT["unity",1], ID["EPSG",8805]], PARAMETER["False easting",500000, LENGTHUNIT["metre",1], ID["EPSG",8806]], PARAMETER["False northing",0, LENGTHUNIT["metre",1], ID["EPSG",8807]], ID["EPSG",16010]], CS[Cartesian,2], AXIS["(E)",east, ORDER[1], LENGTHUNIT["metre",1, ID["EPSG",9001]]], AXIS["(N)",north, ORDER[2], LENGTHUNIT["metre",1, ID["EPSG",9001]]]] >>> geod = crs.get_geod() >>> f"+a={geod.a:.0f} +f={geod.f:.8f}" '+a=6378137 +f=0.00335281' >>> crs.is_projected True >>> crs.is_geographic False """ projstring = "" if projparams: if isinstance(projparams, _CRS): projstring = projparams.srs elif _is_epsg_code(projparams): projstring = _prepare_from_epsg(projparams) elif isinstance(projparams, str): projstring = _prepare_from_string(projparams) elif isinstance(projparams, dict): projstring = _prepare_from_dict(projparams) elif isinstance(projparams, (list, tuple)) and 
len(projparams) == 2: projstring = _prepare_from_authority(*projparams) elif hasattr(projparams, "to_wkt"): projstring = projparams.to_wkt() else: raise CRSError(f"Invalid CRS input: {projparams!r}") if kwargs: projkwargs = _prepare_from_dict(kwargs, allow_json=False) projstring = _prepare_from_string(" ".join((projstring, projkwargs))) self.srs = projstring self._local = CRSLocal() if isinstance(projparams, _CRS): self._local.crs = projparams else: self._local.crs = _CRS(self.srs) @property def _crs(self): """ Retrieve the Cython based _CRS object for this thread. """ if self._local.crs is None: self._local.crs = _CRS(self.srs) return self._local.crs @classmethod def from_authority(cls, auth_name: str, code: str | int) -> "CRS": """ .. versionadded:: 2.2.0 Make a CRS from an authority name and authority code Parameters ---------- auth_name: str The name of the authority. code : int or str The code used by the authority. Returns ------- CRS """ return cls.from_user_input(_prepare_from_authority(auth_name, code)) @classmethod def from_epsg(cls, code: str | int) -> "CRS": """Make a CRS from an EPSG code Parameters ---------- code : int or str An EPSG code. Returns ------- CRS """ return cls.from_user_input(_prepare_from_epsg(code)) @classmethod def from_proj4(cls, in_proj_string: str) -> "CRS": """ .. versionadded:: 2.2.0 Make a CRS from a PROJ string Parameters ---------- in_proj_string : str A PROJ string. Returns ------- CRS """ if not is_proj(in_proj_string): raise CRSError(f"Invalid PROJ string: {in_proj_string}") return cls.from_user_input(_prepare_from_proj_string(in_proj_string)) @classmethod def from_wkt(cls, in_wkt_string: str) -> "CRS": """ .. versionadded:: 2.2.0 Make a CRS from a WKT string Parameters ---------- in_wkt_string : str A WKT string. Returns ------- CRS """ if not is_wkt(in_wkt_string): raise CRSError(f"Invalid WKT string: {in_wkt_string}") return cls.from_user_input(_prepare_from_string(in_wkt_string)) @classmethod def from_string(cls, in_crs_string: str) -> "CRS": """ Make a CRS from: Initialize a CRS class instance with: - PROJ string - JSON string with PROJ parameters - CRS WKT string - An authority string [i.e. 'epsg:4326'] Parameters ---------- in_crs_string : str An EPSG, PROJ, or WKT string. Returns ------- CRS """ return cls.from_user_input(_prepare_from_string(in_crs_string)) def to_string(self) -> str: """ .. versionadded:: 2.2.0 Convert the CRS to a string. It attempts to convert it to the authority string. Otherwise, it uses the string format of the user input to create the CRS. Returns ------- str """ auth_info = self.to_authority(min_confidence=100) if auth_info: return ":".join(auth_info) return self.srs @classmethod def from_user_input(cls, value: Any, **kwargs) -> "CRS": """ Initialize a CRS class instance with: - PROJ string - Dictionary of PROJ parameters - PROJ keyword arguments for parameters - JSON string with PROJ parameters - CRS WKT string - An authority string [i.e. 'epsg:4326'] - An EPSG integer code [i.e. 4326] - A tuple of ("auth_name": "auth_code") [i.e ('epsg', '4326')] - An object with a `to_wkt` method. - A :class:`pyproj.crs.CRS` class Parameters ---------- value : obj A Python int, dict, or str. Returns ------- CRS """ if isinstance(value, cls): return value return cls(value, **kwargs) def get_geod(self) -> Geod | None: """ Returns ------- pyproj.geod.Geod: Geod object based on the ellipsoid. 
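# Usage sketch (not part of the pyproj source): the alternate constructors above
# all funnel through from_user_input(), so the same CRS can be built from an EPSG
# code, an authority string, a PROJ string, etc.
from pyproj import CRS

crs_from_code = CRS.from_epsg(4326)
crs_from_auth = CRS.from_user_input("EPSG:4326")
print(crs_from_code == crs_from_auth)  # True
print(crs_from_code.to_string())       # EPSG:4326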
""" if self.ellipsoid is None: return None return Geod( a=self.ellipsoid.semi_major_metre, rf=self.ellipsoid.inverse_flattening, b=self.ellipsoid.semi_minor_metre, ) @classmethod def from_dict(cls, proj_dict: dict) -> "CRS": """ .. versionadded:: 2.2.0 Make a CRS from a dictionary of PROJ parameters. Parameters ---------- proj_dict : str PROJ params in dict format. Returns ------- CRS """ return cls.from_user_input(_prepare_from_dict(proj_dict)) @classmethod def from_json(cls, crs_json: str) -> "CRS": """ .. versionadded:: 2.4.0 Create CRS from a CRS JSON string. Parameters ---------- crs_json: str CRS JSON string. Returns ------- CRS """ return cls.from_user_input(_load_proj_json(crs_json)) @classmethod def from_json_dict(cls, crs_dict: dict) -> "CRS": """ .. versionadded:: 2.4.0 Create CRS from a JSON dictionary. Parameters ---------- crs_dict: dict CRS dictionary. Returns ------- CRS """ return cls.from_user_input(json.dumps(crs_dict)) def to_dict(self) -> dict: """ .. versionadded:: 2.2.0 Converts the CRS to dictionary of PROJ parameters. .. warning:: You will likely lose important projection information when converting to a PROJ string from another format. See: https://proj.org/faq.html#what-is-the-best-format-for-describing-coordinate-reference-systems # noqa: E501 Returns ------- dict: PROJ params in dict format. """ proj_string = self.to_proj4() if proj_string is None: return {} def _parse(val): if val.lower() == "true": return True if val.lower() == "false": return False try: return int(val) except ValueError: pass try: return float(val) except ValueError: pass return _try_list_if_string(val) proj_dict = {} for param in _RE_PROJ_PARAM.finditer(proj_string): key, value = param.groups() if value is not None: value = _parse(value) if value is not False: proj_dict[key] = value return proj_dict def to_cf( self, wkt_version: WktVersion | str = WktVersion.WKT2_2019, errcheck: bool = False, ) -> dict: """ .. versionadded:: 2.2.0 This converts a :obj:`pyproj.crs.CRS` object to a Climate and Forecast (CF) Grid Mapping Version 1.8 dict. :ref:`build_crs_cf` Parameters ---------- wkt_version: str or pyproj.enums.WktVersion Version of WKT supported by CRS.to_wkt. Default is :attr:`pyproj.enums.WktVersion.WKT2_2019`. errcheck: bool, default=False If True, will warn when parameters are ignored. Returns ------- dict: CF-1.8 version of the projection. 
""" # pylint: disable=too-many-branches,too-many-return-statements cf_dict: dict[str, Any] = {"crs_wkt": self.to_wkt(wkt_version)} # handle bound CRS if ( self.is_bound and self.coordinate_operation and self.coordinate_operation.towgs84 and self.source_crs ): sub_cf: dict[str, Any] = self.source_crs.to_cf( wkt_version=wkt_version, errcheck=errcheck, ) sub_cf.pop("crs_wkt") cf_dict.update(sub_cf) cf_dict["towgs84"] = self.coordinate_operation.towgs84 return cf_dict # handle compound CRS if self.is_compound: for sub_crs in self.sub_crs_list: sub_cf = sub_crs.to_cf(wkt_version=wkt_version, errcheck=errcheck) sub_cf.pop("crs_wkt") cf_dict.update(sub_cf) return cf_dict # handle vertical CRS if self.is_vertical: vert_json = self.to_json_dict() if "geoid_model" in vert_json: cf_dict["geoid_name"] = vert_json["geoid_model"]["name"] if self.datum: cf_dict["geopotential_datum_name"] = self.datum.name return cf_dict # write out datum parameters if self.ellipsoid: cf_dict.update( semi_major_axis=self.ellipsoid.semi_major_metre, semi_minor_axis=self.ellipsoid.semi_minor_metre, inverse_flattening=self.ellipsoid.inverse_flattening, ) cf_dict["reference_ellipsoid_name"] = self.ellipsoid.name if self.prime_meridian: cf_dict["longitude_of_prime_meridian"] = self.prime_meridian.longitude cf_dict["prime_meridian_name"] = self.prime_meridian.name # handle geographic CRS if self.geodetic_crs: cf_dict["geographic_crs_name"] = self.geodetic_crs.name if self.geodetic_crs.datum: cf_dict["horizontal_datum_name"] = self.geodetic_crs.datum.name if self.is_geographic: if self.coordinate_operation: if ( self.coordinate_operation.method_name.lower() not in _INVERSE_GEOGRAPHIC_GRID_MAPPING_NAME_MAP ): if errcheck: warnings.warn( "Unsupported coordinate operation: " f"{self.coordinate_operation.method_name}" ) return {"crs_wkt": cf_dict["crs_wkt"]} cf_dict.update( _INVERSE_GEOGRAPHIC_GRID_MAPPING_NAME_MAP[ self.coordinate_operation.method_name.lower() ](self.coordinate_operation) ) else: cf_dict["grid_mapping_name"] = "latitude_longitude" return cf_dict # handle projected CRS coordinate_operation = None if not self.is_bound and self.is_projected: coordinate_operation = self.coordinate_operation cf_dict["projected_crs_name"] = self.name coordinate_operation_name = ( None if not coordinate_operation else coordinate_operation.method_name.lower().replace(" ", "_") ) if coordinate_operation_name not in _INVERSE_GRID_MAPPING_NAME_MAP: if errcheck: if coordinate_operation: warnings.warn( "Unsupported coordinate operation: " f"{coordinate_operation.method_name}" ) else: warnings.warn("Coordinate operation not found.") return {"crs_wkt": cf_dict["crs_wkt"]} cf_dict.update( _INVERSE_GRID_MAPPING_NAME_MAP[coordinate_operation_name]( coordinate_operation ) ) return cf_dict @staticmethod def from_cf( in_cf: dict, ellipsoidal_cs: Any | None = None, cartesian_cs: Any | None = None, vertical_cs: Any | None = None, ) -> "CRS": """ .. versionadded:: 2.2.0 .. versionadded:: 3.0.0 ellipsoidal_cs, cartesian_cs, vertical_cs This converts a Climate and Forecast (CF) Grid Mapping Version 1.8 dict to a :obj:`pyproj.crs.CRS` object. :ref:`build_crs_cf` Parameters ---------- in_cf: dict CF version of the projection. ellipsoidal_cs: Any, optional Input to create an Ellipsoidal Coordinate System. Anything accepted by :meth:`pyproj.crs.CoordinateSystem.from_user_input` or an Ellipsoidal Coordinate System created from :ref:`coordinate_system`. cartesian_cs: Any, optional Input to create a Cartesian Coordinate System. 
Anything accepted by :meth:`pyproj.crs.CoordinateSystem.from_user_input` or :class:`pyproj.crs.coordinate_system.Cartesian2DCS`. vertical_cs: Any, optional Input to create a Vertical Coordinate System accepted by :meth:`pyproj.crs.CoordinateSystem.from_user_input` or :class:`pyproj.crs.coordinate_system.VerticalCS` Returns ------- CRS """ # pylint: disable=too-many-branches unknown_names = ("unknown", "undefined") if "crs_wkt" in in_cf: return CRS(in_cf["crs_wkt"]) if "spatial_ref" in in_cf: # for previous supported WKT key return CRS(in_cf["spatial_ref"]) grid_mapping_name = in_cf.get("grid_mapping_name") if grid_mapping_name is None: raise CRSError("CF projection parameters missing 'grid_mapping_name'") # build datum if possible datum = _horizontal_datum_from_params(in_cf) # build geographic CRS try: geographic_conversion_method: None | Callable = ( _GEOGRAPHIC_GRID_MAPPING_NAME_MAP[grid_mapping_name] ) except KeyError: geographic_conversion_method = None geographic_crs_name = in_cf.get("geographic_crs_name") if datum: geographic_crs: CRS = GeographicCRS( name=geographic_crs_name or "undefined", datum=datum, ellipsoidal_cs=ellipsoidal_cs, ) elif geographic_crs_name and geographic_crs_name not in unknown_names: geographic_crs = CRS(geographic_crs_name) if ellipsoidal_cs is not None: geographic_crs_json = geographic_crs.to_json_dict() geographic_crs_json["coordinate_system"] = ( CoordinateSystem.from_user_input(ellipsoidal_cs).to_json_dict() ) geographic_crs = CRS(geographic_crs_json) else: geographic_crs = GeographicCRS(ellipsoidal_cs=ellipsoidal_cs) if grid_mapping_name == "latitude_longitude": return geographic_crs if geographic_conversion_method is not None: return DerivedGeographicCRS( base_crs=geographic_crs, conversion=geographic_conversion_method(in_cf), ellipsoidal_cs=ellipsoidal_cs, ) # build projected CRS try: conversion_method = _GRID_MAPPING_NAME_MAP[grid_mapping_name] except KeyError: raise CRSError( f"Unsupported grid mapping name: {grid_mapping_name}" ) from None projected_crs = ProjectedCRS( name=in_cf.get("projected_crs_name", "undefined"), conversion=conversion_method(in_cf), geodetic_crs=geographic_crs, cartesian_cs=cartesian_cs, ) # build bound CRS if exists bound_crs = None if "towgs84" in in_cf: bound_crs = BoundCRS( source_crs=projected_crs, target_crs="WGS 84", transformation=ToWGS84Transformation( projected_crs.geodetic_crs, *_try_list_if_string(in_cf["towgs84"]) ), ) if "geopotential_datum_name" not in in_cf: return bound_crs or projected_crs # build Vertical CRS vertical_crs = VerticalCRS( name="undefined", datum=in_cf["geopotential_datum_name"], geoid_model=in_cf.get("geoid_name"), vertical_cs=vertical_cs, ) # build compound CRS return CompoundCRS( name="undefined", components=[bound_crs or projected_crs, vertical_crs] ) def cs_to_cf(self) -> list[dict]: """ .. versionadded:: 3.0.0 This converts all coordinate systems (cs) in the CRS to a list of Climate and Forecast (CF) Version 1.8 dicts. :ref:`build_crs_cf` Returns ------- list[dict]: CF-1.8 version of the coordinate systems. 
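# Usage sketch (not part of the pyproj source): a minimal CF round trip through
# the to_cf()/from_cf() pair above, using plain geographic WGS 84 as the simplest
# case (grid_mapping_name == "latitude_longitude").
from pyproj import CRS

cf_dict = CRS.from_epsg(4326).to_cf()
print(cf_dict["grid_mapping_name"])        # latitude_longitude
print(CRS.from_cf(cf_dict).is_geographic)  # True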
""" cf_axis_list = [] def rotated_pole(crs): try: return ( crs.coordinate_operation and crs.coordinate_operation.method_name.lower() in _INVERSE_GEOGRAPHIC_GRID_MAPPING_NAME_MAP ) except KeyError: return False if self.type_name == "Temporal CRS" and self.datum: datum_json = self.datum.to_json_dict() origin = datum_json.get("time_origin", "1875-05-20").strip().rstrip("zZ") if len(origin) == 4: origin = f"{origin}-01-01" axis = self.axis_info[0] cf_temporal_axis = { "standard_name": "time", "long_name": "time", "calendar": ( datum_json.get("calendar", "proleptic_gregorian") .lower() .replace(" ", "_") ), "axis": "T", } unit_name = axis.unit_name.lower().replace("calendar", "").strip() # no units for TemporalDateTime if unit_name: cf_temporal_axis["units"] = f"{unit_name} since {origin}" cf_axis_list.append(cf_temporal_axis) if self.coordinate_system: cf_axis_list.extend( self.coordinate_system.to_cf(rotated_pole=rotated_pole(self)) ) elif self.is_bound and self.source_crs and self.source_crs.coordinate_system: cf_axis_list.extend( self.source_crs.coordinate_system.to_cf( rotated_pole=rotated_pole(self.source_crs) ) ) else: for sub_crs in self.sub_crs_list: cf_axis_list.extend(sub_crs.cs_to_cf()) return cf_axis_list def is_exact_same(self, other: Any) -> bool: """ Check if the CRS objects are the exact same. Parameters ---------- other: Any Check if the other CRS is the exact same to this object. If the other object is not a CRS, it will try to create one. On Failure, it will return False. Returns ------- bool """ try: other = CRS.from_user_input(other) except CRSError: return False return self._crs.is_exact_same(other._crs) def equals(self, other: Any, ignore_axis_order: bool = False) -> bool: """ .. versionadded:: 2.5.0 Check if the CRS objects are equivalent. Parameters ---------- other: Any Check if the other object is equivalent to this object. If the other object is not a CRS, it will try to create one. On Failure, it will return False. ignore_axis_order: bool, default=False If True, it will compare the CRS class and ignore the axis order. Returns ------- bool """ try: other = CRS.from_user_input(other) except CRSError: return False return self._crs.equals(other._crs, ignore_axis_order=ignore_axis_order) @property def geodetic_crs(self) -> Optional["CRS"]: """ .. versionadded:: 2.2.0 Returns ------- CRS: The geodeticCRS / geographicCRS from the CRS. """ return ( None if self._crs.geodetic_crs is None else self.__class__(self._crs.geodetic_crs) ) @property def source_crs(self) -> Optional["CRS"]: """ The base CRS of a BoundCRS or a DerivedCRS/ProjectedCRS, or the source CRS of a CoordinateOperation. Returns ------- CRS """ return ( None if self._crs.source_crs is None else self.__class__(self._crs.source_crs) ) @property def target_crs(self) -> Optional["CRS"]: """ .. versionadded:: 2.2.0 Returns ------- CRS: The hub CRS of a BoundCRS or the target CRS of a CoordinateOperation. """ return ( None if self._crs.target_crs is None else self.__class__(self._crs.target_crs) ) @property def sub_crs_list(self) -> list["CRS"]: """ If the CRS is a compound CRS, it will return a list of sub CRS objects. Returns ------- list[CRS] """ return [self.__class__(sub_crs) for sub_crs in self._crs.sub_crs_list] @property def utm_zone(self) -> str | None: """ .. versionadded:: 2.6.0 Finds the UTM zone in a Projected CRS, Bound CRS, or Compound CRS Returns ------- str | None: The UTM zone number and letter if applicable. 
""" if self.is_bound and self.source_crs: return self.source_crs.utm_zone if self.sub_crs_list: for sub_crs in self.sub_crs_list: if sub_crs.utm_zone: return sub_crs.utm_zone elif ( self.coordinate_operation and "UTM ZONE" in self.coordinate_operation.name.upper() ): return self.coordinate_operation.name.upper().split("UTM ZONE ")[-1] return None @property def name(self) -> str: """ Returns ------- str: The name of the CRS (from :cpp:func:`proj_get_name`). """ return self._crs.name @property def type_name(self) -> str: """ Returns ------- str: The name of the type of the CRS object. """ return self._crs.type_name @property def axis_info(self) -> list[Axis]: """ Retrieves all relevant axis information in the CRS. If it is a Bound CRS, it gets the axis list from the Source CRS. If it is a Compound CRS, it gets the axis list from the Sub CRS list. Returns ------- list[Axis]: The list of axis information. """ return self._crs.axis_info @property def area_of_use(self) -> AreaOfUse | None: """ Returns ------- AreaOfUse: The area of use object with associated attributes. """ return self._crs.area_of_use @property def ellipsoid(self) -> Ellipsoid | None: """ .. versionadded:: 2.2.0 Returns ------- Ellipsoid: The ellipsoid object with associated attributes. """ return self._crs.ellipsoid @property def prime_meridian(self) -> PrimeMeridian | None: """ .. versionadded:: 2.2.0 Returns ------- PrimeMeridian: The prime meridian object with associated attributes. """ return self._crs.prime_meridian @property def datum(self) -> Datum | None: """ .. versionadded:: 2.2.0 Returns ------- Datum """ return self._crs.datum @property def coordinate_system(self) -> CoordinateSystem | None: """ .. versionadded:: 2.2.0 Returns ------- CoordinateSystem """ return self._crs.coordinate_system @property def coordinate_operation(self) -> CoordinateOperation | None: """ .. versionadded:: 2.2.0 Returns ------- CoordinateOperation """ return self._crs.coordinate_operation @property def remarks(self) -> str: """ .. versionadded:: 2.4.0 Returns ------- str: Remarks about object. """ return self._crs.remarks @property def scope(self) -> str: """ .. versionadded:: 2.4.0 Returns ------- str: Scope of object. """ return self._crs.scope def to_wkt( self, version: WktVersion | str = WktVersion.WKT2_2019, pretty: bool = False, output_axis_rule: bool | None = None, ) -> str: """ Convert the projection to a WKT string. Version options: - WKT2_2015 - WKT2_2015_SIMPLIFIED - WKT2_2019 - WKT2_2019_SIMPLIFIED - WKT1_GDAL - WKT1_ESRI .. versionadded:: 3.6.0 output_axis_rule Parameters ---------- version: pyproj.enums.WktVersion, optional The version of the WKT output. Default is :attr:`pyproj.enums.WktVersion.WKT2_2019`. pretty: bool, default=False If True, it will set the output to be a multiline string. output_axis_rule: bool, optional, default=None If True, it will set the axis rule on any case. If false, never. None for AUTO, that depends on the CRS and version. Returns ------- str """ wkt = self._crs.to_wkt( version=version, pretty=pretty, output_axis_rule=output_axis_rule ) if wkt is None: raise CRSError( f"CRS cannot be converted to a WKT string of a '{version}' version. " "Select a different version of a WKT string or edit your CRS." ) return wkt def to_json(self, pretty: bool = False, indentation: int = 2) -> str: """ .. versionadded:: 2.4.0 Convert the object to a JSON string. Parameters ---------- pretty: bool, default=False If True, it will set the output to be a multiline string. 
indentation: int, default=2 If pretty is True, it will set the width of the indentation. Returns ------- str """ proj_json = self._crs.to_json(pretty=pretty, indentation=indentation) if proj_json is None: raise CRSError("CRS cannot be converted to a PROJ JSON string.") return proj_json def to_json_dict(self) -> dict: """ .. versionadded:: 2.4.0 Convert the object to a JSON dictionary. Returns ------- dict """ return self._crs.to_json_dict() def to_proj4(self, version: ProjVersion | int = ProjVersion.PROJ_5) -> str: """ Convert the projection to a PROJ string. .. warning:: You will likely lose important projection information when converting to a PROJ string from another format. See: https://proj.org/faq.html#what-is-the-best-format-for-describing-coordinate-reference-systems # noqa: E501 Parameters ---------- version: pyproj.enums.ProjVersion The version of the PROJ string output. Default is :attr:`pyproj.enums.ProjVersion.PROJ_4`. Returns ------- str """ proj = self._crs.to_proj4(version=version) if proj is None: raise CRSError("CRS cannot be converted to a PROJ string.") return proj def to_epsg(self, min_confidence: int = 70) -> int | None: """ Return the EPSG code best matching the CRS or None if it a match is not found. Example: >>> from pyproj import CRS >>> ccs = CRS("EPSG:4328") >>> ccs.to_epsg() 4328 If the CRS is bound, you can attempt to get an epsg code from the source CRS: >>> from pyproj import CRS >>> ccs = CRS("+proj=geocent +datum=WGS84 +towgs84=0,0,0") >>> ccs.to_epsg() >>> ccs.source_crs.to_epsg() 4978 >>> ccs == CRS.from_epsg(4978) False Parameters ---------- min_confidence: int, default=70 A value between 0-100 where 100 is the most confident. :ref:`min_confidence` Returns ------- int | None: The best matching EPSG code matching the confidence level. """ return self._crs.to_epsg(min_confidence=min_confidence) def to_authority(self, auth_name: str | None = None, min_confidence: int = 70): """ .. versionadded:: 2.2.0 Return the authority name and code best matching the CRS or None if it a match is not found. Example: >>> from pyproj import CRS >>> ccs = CRS("EPSG:4328") >>> ccs.to_authority() ('EPSG', '4328') If the CRS is bound, you can get an authority from the source CRS: >>> from pyproj import CRS >>> ccs = CRS("+proj=geocent +datum=WGS84 +towgs84=0,0,0") >>> ccs.to_authority() >>> ccs.source_crs.to_authority() ('EPSG', '4978') >>> ccs == CRS.from_authorty('EPSG', '4978') False Parameters ---------- auth_name: str, optional The name of the authority to filter by. min_confidence: int, default=70 A value between 0-100 where 100 is the most confident. :ref:`min_confidence` Returns ------- tuple(str, str) or None: The best matching (, ) for the confidence level. """ return self._crs.to_authority( auth_name=auth_name, min_confidence=min_confidence ) def list_authority( self, auth_name: str | None = None, min_confidence: int = 70 ) -> list[AuthorityMatchInfo]: """ .. versionadded:: 3.2.0 Return the authority names and codes best matching the CRS. 
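# Usage sketch (not part of the pyproj source): looking up identifiers with the
# to_epsg()/to_authority() methods above; an OGC URN input is used here only to
# show that the lookup is not limited to "EPSG:xxxx" strings.
from pyproj import CRS

crs = CRS.from_user_input("urn:ogc:def:crs:EPSG::4326")
print(crs.to_epsg())       # 4326
print(crs.to_authority())  # ('EPSG', '4326')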
Example: >>> from pyproj import CRS >>> ccs = CRS("EPSG:4328") >>> ccs.list_authority() [AuthorityMatchInfo(auth_name='EPSG', code='4326', confidence=100)] If the CRS is bound, you can get an authority from the source CRS: >>> from pyproj import CRS >>> ccs = CRS("+proj=geocent +datum=WGS84 +towgs84=0,0,0") >>> ccs.list_authority() [] >>> ccs.source_crs.list_authority() [AuthorityMatchInfo(auth_name='EPSG', code='4978', confidence=70)] >>> ccs == CRS.from_authorty('EPSG', '4978') False Parameters ---------- auth_name: str, optional The name of the authority to filter by. min_confidence: int, default=70 A value between 0-100 where 100 is the most confident. :ref:`min_confidence` Returns ------- list[AuthorityMatchInfo]: List of authority matches for the CRS. """ return self._crs.list_authority( auth_name=auth_name, min_confidence=min_confidence ) def to_3d(self, name: str | None = None) -> "CRS": """ .. versionadded:: 3.1.0 Convert the current CRS to the 3D version if it makes sense. New vertical axis attributes: - ellipsoidal height - oriented upwards - metre units Parameters ---------- name: str, optional CRS name. Defaults to use the name of the original CRS. Returns ------- CRS """ return self.__class__(self._crs.to_3d(name=name)) def to_2d(self, name: str | None = None) -> "CRS": """ .. versionadded:: 3.6.0 Convert the current CRS to the 2D version if it makes sense. Parameters ---------- name: str, optional CRS name. Defaults to use the name of the original CRS. Returns ------- CRS """ return self.__class__(self._crs.to_2d(name=name)) @property def is_geographic(self) -> bool: """ This checks if the CRS is geographic. It will check if it has a geographic CRS in the sub CRS if it is a compound CRS and will check if the source CRS is geographic if it is a bound CRS. Returns ------- bool: True if the CRS is in geographic (lon/lat) coordinates. """ return self._crs.is_geographic @property def is_projected(self) -> bool: """ This checks if the CRS is projected. It will check if it has a projected CRS in the sub CRS if it is a compound CRS and will check if the source CRS is projected if it is a bound CRS. Returns ------- bool: True if CRS is projected. """ return self._crs.is_projected @property def is_vertical(self) -> bool: """ .. versionadded:: 2.2.0 This checks if the CRS is vertical. It will check if it has a vertical CRS in the sub CRS if it is a compound CRS and will check if the source CRS is vertical if it is a bound CRS. Returns ------- bool: True if CRS is vertical. """ return self._crs.is_vertical @property def is_bound(self) -> bool: """ Returns ------- bool: True if CRS is bound. """ return self._crs.is_bound @property def is_compound(self) -> bool: """ .. versionadded:: 3.1.0 Returns ------- bool: True if CRS is compound. """ return self._crs.is_compound @property def is_engineering(self) -> bool: """ .. versionadded:: 2.2.0 Returns ------- bool: True if CRS is local/engineering. """ return self._crs.is_engineering @property def is_geocentric(self) -> bool: """ This checks if the CRS is geocentric and takes into account if the CRS is bound. Returns ------- bool: True if CRS is in geocentric (x/y) coordinates. """ return self._crs.is_geocentric @property def is_derived(self): """ .. versionadded:: 3.2.0 Returns ------- bool: True if CRS is a Derived CRS. """ return self._crs.is_derived @property def is_deprecated(self) -> bool: """ .. 
versionadded:: 3.7.0 Check if the CRS is deprecated Returns ------- bool """ return self._crs.is_deprecated def get_non_deprecated(self) -> list["CRS"]: """ .. versionadded:: 3.7.0 Return a list of non-deprecated objects related to this. Returns ------- list[CRS] """ return self._crs.get_non_deprecated() def __eq__(self, other: object) -> bool: return self.equals(other) def __getstate__(self) -> dict[str, str]: return {"srs": self.srs} def __setstate__(self, state: dict[str, Any]): self.__dict__.update(state) self._local = CRSLocal() def __hash__(self) -> int: return hash(self.to_wkt()) def __str__(self) -> str: return self.srs def __repr__(self) -> str: # get axis information axis_info_list: list[str] = [] for axis in self.axis_info: axis_info_list.extend(["- ", str(axis), "\n"]) axis_info_str = "".join(axis_info_list) # get coordinate system & sub CRS info source_crs_repr = "" sub_crs_repr = "" if self.coordinate_system and self.coordinate_system.axis_list: coordinate_system_name = str(self.coordinate_system) elif self.is_bound and self.source_crs: coordinate_system_name = str(self.source_crs.coordinate_system) source_crs_repr = f"Source CRS: {self.source_crs.name}\n" else: coordinate_system_names = [] sub_crs_repr_list = ["Sub CRS:\n"] for sub_crs in self.sub_crs_list: coordinate_system_names.append(str(sub_crs.coordinate_system)) sub_crs_repr_list.extend(["- ", sub_crs.name, "\n"]) coordinate_system_name = "|".join(coordinate_system_names) sub_crs_repr = "".join(sub_crs_repr_list) # get coordinate operation repr coordinate_operation = "" if self.coordinate_operation: coordinate_operation = "".join( [ "Coordinate Operation:\n", "- name: ", str(self.coordinate_operation), "\n- method: ", self.coordinate_operation.method_name, "\n", ] ) # get SRS representation srs_repr = self.to_string() srs_repr = srs_repr if len(srs_repr) <= 50 else " ".join([srs_repr[:50], "..."]) axis_info_str = axis_info_str or "- undefined\n" return ( f"<{self.type_name}: {srs_repr}>\n" f"Name: {self.name}\n" f"Axis Info [{coordinate_system_name or 'undefined'}]:\n" f"{axis_info_str}" "Area of Use:\n" f"{self.area_of_use or '- undefined'}\n" f"{coordinate_operation}" f"Datum: {self.datum}\n" f"- Ellipsoid: {self.ellipsoid or 'undefined'}\n" f"- Prime Meridian: {self.prime_meridian or 'undefined'}\n" f"{source_crs_repr}" f"{sub_crs_repr}" ) class CustomConstructorCRS(CRS): """ This class is a base class for CRS classes that use a different constructor than the main CRS class. .. versionadded:: 3.2.0 See: https://github.com/pyproj4/pyproj/issues/847 """ @property def _expected_types(self) -> tuple[str, ...]: """ These are the type names of the CRS class that are expected when using the from_* methods. """ raise NotImplementedError def _check_type(self): """ This validates that the type of the CRS is expected when using the from_* methods. """ if self.type_name not in self._expected_types: raise CRSError( f"Invalid type {self.type_name}. Expected {self._expected_types}." ) @classmethod def from_user_input(cls, value: Any, **kwargs) -> "CRS": """ Initialize a CRS class instance with: - PROJ string - Dictionary of PROJ parameters - PROJ keyword arguments for parameters - JSON string with PROJ parameters - CRS WKT string - An authority string [i.e. 'epsg:4326'] - An EPSG integer code [i.e. 4326] - A tuple of ("auth_name": "auth_code") [i.e ('epsg', '4326')] - An object with a `to_wkt` method. - A :class:`pyproj.crs.CRS` class Parameters ---------- value : obj A Python int, dict, or str. 
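# Usage sketch (not part of the pyproj source): because of the __hash__/__eq__/
# __getstate__ definitions above, CRS objects can be pickled and used as
# dictionary keys; the cache contents here are illustrative.
import pickle
from pyproj import CRS

web_mercator = CRS.from_epsg(3857)
assert pickle.loads(pickle.dumps(web_mercator)) == web_mercator
tile_sources = {web_mercator: "xyz tiles"}
print(tile_sources[CRS.from_epsg(3857)])  # xyz tiles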
Returns ------- CRS """ if isinstance(value, cls): return value crs = cls.__new__(cls) super(CustomConstructorCRS, crs).__init__(value, **kwargs) crs._check_type() return crs @property def geodetic_crs(self) -> Optional["CRS"]: """ .. versionadded:: 2.2.0 Returns ------- CRS: The geodeticCRS / geographicCRS from the CRS. """ return None if self._crs.geodetic_crs is None else CRS(self._crs.geodetic_crs) @property def source_crs(self) -> Optional["CRS"]: """ The base CRS of a BoundCRS or a DerivedCRS/ProjectedCRS, or the source CRS of a CoordinateOperation. Returns ------- CRS """ return None if self._crs.source_crs is None else CRS(self._crs.source_crs) @property def target_crs(self) -> Optional["CRS"]: """ .. versionadded:: 2.2.0 Returns ------- CRS: The hub CRS of a BoundCRS or the target CRS of a CoordinateOperation. """ return None if self._crs.target_crs is None else CRS(self._crs.target_crs) @property def sub_crs_list(self) -> list["CRS"]: """ If the CRS is a compound CRS, it will return a list of sub CRS objects. Returns ------- list[CRS] """ return [CRS(sub_crs) for sub_crs in self._crs.sub_crs_list] def to_3d(self, name: str | None = None) -> "CRS": """ .. versionadded:: 3.1.0 Convert the current CRS to the 3D version if it makes sense. New vertical axis attributes: - ellipsoidal height - oriented upwards - metre units Parameters ---------- name: str, optional CRS name. Defaults to use the name of the original CRS. Returns ------- CRS """ return CRS(self._crs.to_3d(name=name)) class GeographicCRS(CustomConstructorCRS): """ .. versionadded:: 2.5.0 This class is for building a Geographic CRS """ _expected_types = ("Geographic CRS", "Geographic 2D CRS", "Geographic 3D CRS") def __init__( self, name: str = "undefined", datum: Any = "urn:ogc:def:ensemble:EPSG::6326", ellipsoidal_cs: Any | None = None, ) -> None: """ Parameters ---------- name: str, default="undefined" Name of the CRS. datum: Any, default="urn:ogc:def:ensemble:EPSG::6326" Anything accepted by :meth:`pyproj.crs.Datum.from_user_input` or a :class:`pyproj.crs.datum.CustomDatum`. ellipsoidal_cs: Any, optional Input to create an Ellipsoidal Coordinate System. Anything accepted by :meth:`pyproj.crs.CoordinateSystem.from_user_input` or an Ellipsoidal Coordinate System created from :ref:`coordinate_system`. """ datum = Datum.from_user_input(datum).to_json_dict() geographic_crs_json = { "$schema": "https://proj.org/schemas/v0.2/projjson.schema.json", "type": "GeographicCRS", "name": name, "coordinate_system": CoordinateSystem.from_user_input( ellipsoidal_cs or Ellipsoidal2DCS() ).to_json_dict(), } if datum["type"] == "DatumEnsemble": geographic_crs_json["datum_ensemble"] = datum else: geographic_crs_json["datum"] = datum super().__init__(geographic_crs_json) class DerivedGeographicCRS(CustomConstructorCRS): """ .. versionadded:: 2.5.0 This class is for building a Derived Geographic CRS """ _expected_types = ( "Derived Geographic CRS", "Derived Geographic 2D CRS", "Derived Geographic 3D CRS", ) def __init__( self, base_crs: Any, conversion: Any, ellipsoidal_cs: Any | None = None, name: str = "undefined", ) -> None: """ Parameters ---------- base_crs: Any Input to create the Geodetic CRS, a :class:`GeographicCRS` or anything accepted by :meth:`pyproj.crs.CRS.from_user_input`. conversion: Any Anything accepted by :meth:`pyproj.crs.CoordinateSystem.from_user_input` or a conversion from :ref:`coordinate_operation`. ellipsoidal_cs: Any, optional Input to create an Ellipsoidal Coordinate System. 
Anything accepted by :meth:`pyproj.crs.CoordinateSystem.from_user_input` or an Ellipsoidal Coordinate System created from :ref:`coordinate_system`. name: str, default="undefined" Name of the CRS. """ derived_geographic_crs_json = { "$schema": "https://proj.org/schemas/v0.2/projjson.schema.json", "type": "DerivedGeographicCRS", "name": name, "base_crs": CRS.from_user_input(base_crs).to_json_dict(), "conversion": CoordinateOperation.from_user_input( conversion ).to_json_dict(), "coordinate_system": CoordinateSystem.from_user_input( ellipsoidal_cs or Ellipsoidal2DCS() ).to_json_dict(), } super().__init__(derived_geographic_crs_json) class GeocentricCRS(CustomConstructorCRS): """ .. versionadded:: 3.2.0 This class is for building a Geocentric CRS """ _expected_types = ("Geocentric CRS",) def __init__( self, name: str = "undefined", datum: Any = "urn:ogc:def:datum:EPSG::6326", ) -> None: """ Parameters ---------- name: str, default="undefined" Name of the CRS. datum: Any, default="urn:ogc:def:datum:EPSG::6326" Anything accepted by :meth:`pyproj.crs.Datum.from_user_input` or a :class:`pyproj.crs.datum.CustomDatum`. """ geocentric_crs_json = { "$schema": ("https://proj.org/schemas/v0.2/projjson.schema.json"), "type": "GeodeticCRS", "name": name, "datum": Datum.from_user_input(datum).to_json_dict(), "coordinate_system": { "subtype": "Cartesian", "axis": [ { "name": "Geocentric X", "abbreviation": "X", "direction": "geocentricX", "unit": "metre", }, { "name": "Geocentric Y", "abbreviation": "Y", "direction": "geocentricY", "unit": "metre", }, { "name": "Geocentric Z", "abbreviation": "Z", "direction": "geocentricZ", "unit": "metre", }, ], }, } super().__init__(geocentric_crs_json) class ProjectedCRS(CustomConstructorCRS): """ .. versionadded:: 2.5.0 This class is for building a Projected CRS. """ _expected_types = ("Projected CRS", "Derived Projected CRS") def __init__( self, conversion: Any, name: str = "undefined", cartesian_cs: Any | None = None, geodetic_crs: Any | None = None, ) -> None: """ Parameters ---------- conversion: Any Anything accepted by :meth:`pyproj.crs.CoordinateSystem.from_user_input` or a conversion from :ref:`coordinate_operation`. name: str, optional The name of the Projected CRS. Default is undefined. cartesian_cs: Any, optional Input to create a Cartesian Coordinate System. Anything accepted by :meth:`pyproj.crs.CoordinateSystem.from_user_input` or :class:`pyproj.crs.coordinate_system.Cartesian2DCS`. geodetic_crs: Any, optional Input to create the Geodetic CRS, a :class:`GeographicCRS` or anything accepted by :meth:`pyproj.crs.CRS.from_user_input`. """ proj_crs_json = { "$schema": "https://proj.org/schemas/v0.2/projjson.schema.json", "type": "ProjectedCRS", "name": name, "base_crs": CRS.from_user_input( geodetic_crs or GeographicCRS() ).to_json_dict(), "conversion": CoordinateOperation.from_user_input( conversion ).to_json_dict(), "coordinate_system": CoordinateSystem.from_user_input( cartesian_cs or Cartesian2DCS() ).to_json_dict(), } super().__init__(proj_crs_json) class VerticalCRS(CustomConstructorCRS): """ .. versionadded:: 2.5.0 This class is for building a Vertical CRS. .. warning:: geoid_model support only exists in PROJ >= 6.3.0 """ _expected_types = ("Vertical CRS",) def __init__( self, name: str, datum: Any, vertical_cs: Any | None = None, geoid_model: str | None = None, ) -> None: """ Parameters ---------- name: str The name of the Vertical CRS (e.g. NAVD88 height). 
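# Usage sketch (not part of the pyproj source): tying the CRS builders above back
# to coordinate_operation.py, a rotated-pole CRS built with DerivedGeographicCRS.
# The pole parameters are illustrative values only.
from pyproj.crs import DerivedGeographicCRS
from pyproj.crs.coordinate_operation import RotatedLatitudeLongitudeConversion

rotated_crs = DerivedGeographicCRS(
    base_crs="EPSG:4326",
    conversion=RotatedLatitudeLongitudeConversion(o_lat_p=37.5, o_lon_p=177.5),
)
print(rotated_crs.is_derived)  # True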
datum: Any Anything accepted by :meth:`pyproj.crs.Datum.from_user_input` vertical_cs: Any, optional Input to create a Vertical Coordinate System accepted by :meth:`pyproj.crs.CoordinateSystem.from_user_input` or :class:`pyproj.crs.coordinate_system.VerticalCS` geoid_model: str, optional The name of the GEOID Model (e.g. GEOID12B). """ vert_crs_json = { "$schema": "https://proj.org/schemas/v0.2/projjson.schema.json", "type": "VerticalCRS", "name": name, "datum": Datum.from_user_input(datum).to_json_dict(), "coordinate_system": CoordinateSystem.from_user_input( vertical_cs or VerticalCS() ).to_json_dict(), } if geoid_model is not None: vert_crs_json["geoid_model"] = {"name": geoid_model} super().__init__(vert_crs_json) class CompoundCRS(CustomConstructorCRS): """ .. versionadded:: 2.5.0 This class is for building a Compound CRS. """ _expected_types = ("Compound CRS",) def __init__(self, name: str, components: list[Any]) -> None: """ Parameters ---------- name: str The name of the Compound CRS. components: list[Any], optional List of CRS to create a Compound Coordinate System. List of anything accepted by :meth:`pyproj.crs.CRS.from_user_input` """ compound_crs_json = { "$schema": "https://proj.org/schemas/v0.2/projjson.schema.json", "type": "CompoundCRS", "name": name, "components": [ CRS.from_user_input(component).to_json_dict() for component in components ], } super().__init__(compound_crs_json) class BoundCRS(CustomConstructorCRS): """ .. versionadded:: 2.5.0 This class is for building a Bound CRS. """ _expected_types = ("Bound CRS",) def __init__(self, source_crs: Any, target_crs: Any, transformation: Any) -> None: """ Parameters ---------- source_crs: Any Input to create a source CRS. target_crs: Any Input to create the target CRS. transformation: Any Input to create the transformation. """ bound_crs_json = { "$schema": "https://proj.org/schemas/v0.2/projjson.schema.json", "type": "BoundCRS", "source_crs": CRS.from_user_input(source_crs).to_json_dict(), "target_crs": CRS.from_user_input(target_crs).to_json_dict(), "transformation": CoordinateOperation.from_user_input( transformation ).to_json_dict(), } super().__init__(bound_crs_json) pyproj-3.7.1/pyproj/crs/datum.py000066400000000000000000000071421475425760300166640ustar00rootroot00000000000000""" This module is for building datums to be used when building a CRS. """ from typing import Any from pyproj._crs import Datum, Ellipsoid, PrimeMeridian class CustomDatum(Datum): """ .. versionadded:: 2.5.0 Class to build a datum based on an ellipsoid and prime meridian. """ def __new__( cls, name: str = "undefined", ellipsoid: Any = "WGS 84", prime_meridian: Any = "Greenwich", ): """ Parameters ---------- name: str, default="undefined" Name of the datum. ellipsoid: Any, default="WGS 84" Anything accepted by :meth:`pyproj.crs.Ellipsoid.from_user_input` or a :class:`pyproj.crs.datum.CustomEllipsoid`. prime_meridian: Any, default="Greenwich" Anything accepted by :meth:`pyproj.crs.PrimeMeridian.from_user_input`. """ datum_json = { "$schema": "https://proj.org/schemas/v0.2/projjson.schema.json", "type": "GeodeticReferenceFrame", "name": name, "ellipsoid": Ellipsoid.from_user_input(ellipsoid).to_json_dict(), "prime_meridian": PrimeMeridian.from_user_input( prime_meridian ).to_json_dict(), } return cls.from_json_dict(datum_json) class CustomEllipsoid(Ellipsoid): """ .. versionadded:: 2.5.0 Class to build a custom ellipsoid. 
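# Usage sketch (not part of the pyproj source): a BoundCRS (defined above) pinning
# a custom geographic CRS to WGS 84 via the ToWGS84Transformation from
# coordinate_operation.py; the shift values are illustrative, not a real datum
# transformation.
from pyproj.crs import BoundCRS, GeographicCRS
from pyproj.crs.coordinate_operation import ToWGS84Transformation

local_geographic = GeographicCRS(name="sketch geographic CRS")
bound = BoundCRS(
    source_crs=local_geographic,
    target_crs="WGS 84",
    transformation=ToWGS84Transformation(local_geographic, 100.0, 50.0, -20.0),
)
print(bound.is_bound)  # True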
""" def __new__( cls, name: str = "undefined", semi_major_axis: float | None = None, inverse_flattening: float | None = None, semi_minor_axis: float | None = None, radius: float | None = None, ): """ Parameters ---------- name: str, default="undefined" Name of the ellipsoid. semi_major_axis: float, optional The semi major axis in meters. Required if missing radius. inverse_flattening: float, optional The inverse flattening in meters. Required if missing semi_minor_axis and radius. semi_minor_axis: float, optional The semi minor axis in meters. Required if missing inverse_flattening and radius. radius: float, optional The radius in meters. Can only be used alone. Cannot be mixed with other parameters. """ ellipsoid_json: dict[str, float | str] = { "$schema": "https://proj.org/schemas/v0.2/projjson.schema.json", "type": "Ellipsoid", "name": name, } if semi_major_axis is not None: ellipsoid_json["semi_major_axis"] = semi_major_axis if inverse_flattening is not None: ellipsoid_json["inverse_flattening"] = inverse_flattening if semi_minor_axis is not None: ellipsoid_json["semi_minor_axis"] = semi_minor_axis if radius is not None: ellipsoid_json["radius"] = radius return cls.from_json_dict(ellipsoid_json) class CustomPrimeMeridian(PrimeMeridian): """ .. versionadded:: 2.5.0 Class to build a prime meridian based on a longitude. """ def __new__(cls, longitude: float, name: str = "undefined"): """ Parameters ---------- longitude: float Longitude of prime meridian. name: str, optional Name of the prime meridian. """ datum_json = { "$schema": "https://proj.org/schemas/v0.2/projjson.schema.json", "type": "PrimeMeridian", "name": name, "longitude": longitude, } return cls.from_json_dict(datum_json) pyproj-3.7.1/pyproj/crs/enums.py000066400000000000000000000071331475425760300167010ustar00rootroot00000000000000""" This module contains enumerations used in pyproj.crs. """ from pyproj.enums import BaseEnum class DatumType(BaseEnum): """ .. versionadded:: 2.5.0 Datum Types for creating datum with :meth:`pyproj.crs.Datum.from_name` Attributes ---------- GEODETIC_REFERENCE_FRAME DYNAMIC_GEODETIC_REFERENCE_FRAME VERTICAL_REFERENCE_FRAME DYNAMIC_VERTICAL_REFERENCE_FRAME DATUM_ENSEMBLE """ GEODETIC_REFERENCE_FRAME = "GEODETIC_REFERENCE_FRAME" DYNAMIC_GEODETIC_REFERENCE_FRAME = "DYNAMIC_GEODETIC_REFERENCE_FRAME" VERTICAL_REFERENCE_FRAME = "VERTICAL_REFERENCE_FRAME" DYNAMIC_VERTICAL_REFERENCE_FRAME = "DYNAMIC_VERTICAL_REFERENCE_FRAME" DATUM_ENSEMBLE = "DATUM_ENSEMBLE" class CoordinateOperationType(BaseEnum): """ .. versionadded:: 2.5.0 Coordinate Operation Types for creating operation with :meth:`pyproj.crs.CoordinateOperation.from_name` Attributes ---------- CONVERSION TRANSFORMATION CONCATENATED_OPERATION OTHER_COORDINATE_OPERATION """ CONVERSION = "CONVERSION" TRANSFORMATION = "TRANSFORMATION" CONCATENATED_OPERATION = "CONCATENATED_OPERATION" OTHER_COORDINATE_OPERATION = "OTHER_COORDINATE_OPERATION" class Cartesian2DCSAxis(BaseEnum): """ .. 
versionadded:: 2.5.0 Cartesian 2D Coordinate System Axis for creating axis with with :class:`pyproj.crs.coordinate_system.Cartesian2DCS` Attributes ---------- EASTING_NORTHING NORTHING_EASTING EASTING_NORTHING_FT NORTHING_EASTING_FT EASTING_NORTHING_US_FT NORTHING_EASTING_US_FT NORTH_POLE_EASTING_SOUTH_NORTHING_SOUTH SOUTH_POLE_EASTING_NORTH_NORTHING_NORTH WESTING_SOUTHING """ EASTING_NORTHING = "EASTING_NORTHING" NORTHING_EASTING = "NORTHING_EASTING" EASTING_NORTHING_FT = "EASTING_NORTHING_FT" NORTHING_EASTING_FT = "NORTHING_EASTING_FT" EASTING_NORTHING_US_FT = "EASTING_NORTHING_US_FT" NORTHING_EASTING_US_FT = "NORTHING_EASTING_US_FT" NORTH_POLE_EASTING_SOUTH_NORTHING_SOUTH = "NORTH_POLE_EASTING_SOUTH_NORTHING_SOUTH" SOUTH_POLE_EASTING_NORTH_NORTHING_NORTH = "SOUTH_POLE_EASTING_NORTH_NORTHING_NORTH" WESTING_SOUTHING = "WESTING_SOUTHING" class Ellipsoidal2DCSAxis(BaseEnum): """ .. versionadded:: 2.5.0 Ellipsoidal 2D Coordinate System Axis for creating axis with with :class:`pyproj.crs.coordinate_system.Ellipsoidal2DCS` Attributes ---------- LONGITUDE_LATITUDE LATITUDE_LONGITUDE """ LONGITUDE_LATITUDE = "LONGITUDE_LATITUDE" LATITUDE_LONGITUDE = "LATITUDE_LONGITUDE" class Ellipsoidal3DCSAxis(BaseEnum): """ .. versionadded:: 2.5.0 Ellipsoidal 3D Coordinate System Axis for creating axis with with :class:`pyproj.crs.coordinate_system.Ellipsoidal3DCS` Attributes ---------- LONGITUDE_LATITUDE_HEIGHT LATITUDE_LONGITUDE_HEIGHT """ LONGITUDE_LATITUDE_HEIGHT = "LONGITUDE_LATITUDE_HEIGHT" LATITUDE_LONGITUDE_HEIGHT = "LATITUDE_LONGITUDE_HEIGHT" class VerticalCSAxis(BaseEnum): """ .. versionadded:: 2.5.0 Vertical Coordinate System Axis for creating axis with with :class:`pyproj.crs.coordinate_system.VerticalCS` Attributes ---------- UP UP_FT UP_US_FT DEPTH DEPTH_FT DEPTH_US_FT GRAVITY_HEIGHT GRAVITY_HEIGHT_FT GRAVITY_HEIGHT_US_FT """ GRAVITY_HEIGHT = "GRAVITY_HEIGHT" GRAVITY_HEIGHT_FT = "GRAVITY_HEIGHT_FT" GRAVITY_HEIGHT_US_FT = "GRAVITY_HEIGHT_US_FT" DEPTH = "DEPTH" DEPTH_FT = "DEPTH_FT" DEPTH_US_FT = "DEPTH_US_FT" UP = "UP" UP_FT = "UP_FT" UP_US_FT = "UP_US_FT" pyproj-3.7.1/pyproj/database.pyi000066400000000000000000000023121475425760300166720ustar00rootroot00000000000000from typing import NamedTuple from pyproj.aoi import AreaOfInterest, AreaOfUse from pyproj.enums import PJType class Unit(NamedTuple): auth_name: str code: str name: str category: str conv_factor: float proj_short_name: str | None deprecated: bool def get_units_map( auth_name: str | None = None, category: str | None = None, allow_deprecated: bool = False, ) -> dict[str, Unit]: ... def get_authorities() -> list[str]: ... def get_codes( auth_name: str, pj_type: PJType | str, allow_deprecated: bool = False ) -> list[str]: ... class CRSInfo(NamedTuple): auth_name: str code: str name: str type: PJType deprecated: bool area_of_use: AreaOfUse | None projection_method_name: str | None def query_crs_info( auth_name: str | None = None, pj_types: PJType | list[PJType] | None = None, area_of_interest: AreaOfInterest | None = None, contains: bool = False, allow_deprecated: bool = False, ) -> list[CRSInfo]: ... def query_utm_crs_info( datum_name: str | None = None, area_of_interest: AreaOfInterest | None = None, contains: bool = False, ) -> list[CRSInfo]: ... def get_database_metadata(key: str) -> str | None: ... 
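# --- Illustrative usage sketch (not part of the pyproj source) ---------------
# Ties together the custom ellipsoid/datum builders and the axis enums defined
# above: a GeographicCRS with latitude/longitude axis order on a custom sphere.
# The numeric values are illustrative assumptions, not an official definition.
from pyproj.crs import GeographicCRS
from pyproj.crs.coordinate_system import Ellipsoidal2DCS
from pyproj.crs.datum import CustomDatum, CustomEllipsoid
from pyproj.crs.enums import Ellipsoidal2DCSAxis

sphere = CustomEllipsoid(name="Example sphere", radius=6_371_000)  # metres
datum = CustomDatum(name="Example datum", ellipsoid=sphere)
crs = GeographicCRS(
    name="Example geographic CRS",
    datum=datum,
    ellipsoidal_cs=Ellipsoidal2DCS(axis=Ellipsoidal2DCSAxis.LATITUDE_LONGITUDE),
)
print(crs.ellipsoid.name)
# ------------------------------------------------------------------------------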
pyproj-3.7.1/pyproj/database.pyx000066400000000000000000000340701475425760300167170ustar00rootroot00000000000000include "proj.pxi" import warnings from collections import namedtuple from libc.stdlib cimport free, malloc from pyproj._compat cimport cstrdecode, cstrencode from pyproj._context cimport pyproj_context_create from pyproj.aoi import AreaOfUse from pyproj.enums import PJType cdef dict _PJ_TYPE_MAP = { PJType.UNKNOWN: PJ_TYPE_UNKNOWN, PJType.ELLIPSOID: PJ_TYPE_ELLIPSOID, PJType.PRIME_MERIDIAN: PJ_TYPE_PRIME_MERIDIAN, PJType.GEODETIC_REFERENCE_FRAME: PJ_TYPE_GEODETIC_REFERENCE_FRAME, PJType.DYNAMIC_GEODETIC_REFERENCE_FRAME: PJ_TYPE_DYNAMIC_GEODETIC_REFERENCE_FRAME, PJType.VERTICAL_REFERENCE_FRAME: PJ_TYPE_VERTICAL_REFERENCE_FRAME, PJType.DYNAMIC_VERTICAL_REFERENCE_FRAME: PJ_TYPE_DYNAMIC_VERTICAL_REFERENCE_FRAME, PJType.DATUM_ENSEMBLE: PJ_TYPE_DATUM_ENSEMBLE, PJType.CRS: PJ_TYPE_CRS, PJType.GEODETIC_CRS: PJ_TYPE_GEODETIC_CRS, PJType.GEOCENTRIC_CRS: PJ_TYPE_GEOCENTRIC_CRS, PJType.GEOGRAPHIC_CRS: PJ_TYPE_GEOGRAPHIC_CRS, PJType.GEOGRAPHIC_2D_CRS: PJ_TYPE_GEOGRAPHIC_2D_CRS, PJType.GEOGRAPHIC_3D_CRS: PJ_TYPE_GEOGRAPHIC_3D_CRS, PJType.VERTICAL_CRS: PJ_TYPE_VERTICAL_CRS, PJType.PROJECTED_CRS: PJ_TYPE_PROJECTED_CRS, PJType.COMPOUND_CRS: PJ_TYPE_COMPOUND_CRS, PJType.TEMPORAL_CRS: PJ_TYPE_TEMPORAL_CRS, PJType.ENGINEERING_CRS: PJ_TYPE_ENGINEERING_CRS, PJType.BOUND_CRS: PJ_TYPE_BOUND_CRS, PJType.OTHER_CRS: PJ_TYPE_OTHER_CRS, PJType.CONVERSION: PJ_TYPE_CONVERSION, PJType.TRANSFORMATION: PJ_TYPE_TRANSFORMATION, PJType.CONCATENATED_OPERATION: PJ_TYPE_CONCATENATED_OPERATION, PJType.OTHER_COORDINATE_OPERATION: PJ_TYPE_OTHER_COORDINATE_OPERATION, PJType.DERIVED_PROJECTED_CRS: PJ_TYPE_DERIVED_PROJECTED_CRS, } cdef dict _INV_PJ_TYPE_MAP = {value: key for key, value in _PJ_TYPE_MAP.items()} cdef PJ_TYPE get_pj_type(pj_type) except *: if not isinstance(pj_type, PJType): pj_type = PJType.create(pj_type) return _PJ_TYPE_MAP[pj_type] def get_authorities(): """ .. versionadded:: 2.4.0 See: :c:func:`proj_get_authorities_from_database` Returns ------- list[str]: Authorities in PROJ database. """ cdef PJ_CONTEXT* context = pyproj_context_create() cdef PROJ_STRING_LIST proj_auth_list = NULL with nogil: proj_auth_list = proj_get_authorities_from_database(context) if proj_auth_list == NULL: return [] cdef int iii = 0 try: auth_list = [] while proj_auth_list[iii] != NULL: auth_list.append(proj_auth_list[iii]) iii += 1 finally: proj_string_list_destroy(proj_auth_list) return auth_list def get_codes(str auth_name not None, pj_type not None, bint allow_deprecated=False): """ .. versionadded:: 2.4.0 See: :c:func:`proj_get_codes_from_database` Parameters ---------- auth_name: str The name of the authority. pj_type: pyproj.enums.PJType The type of object to get the authorities. allow_deprecated: bool, default=False Allow a deprecated code in the return. Returns ------- list[str]: Codes associated with authorities in PROJ database. 
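# --- Illustrative usage sketch (not part of the pyproj source) ---------------
# Lists the authorities known to the PROJ database and the geographic 2D CRS
# codes published by EPSG, using get_authorities and get_codes defined above.
from pyproj.database import get_authorities, get_codes
from pyproj.enums import PJType

print(get_authorities())                          # e.g. ['EPSG', 'ESRI', 'IGNF', ...]
codes = get_codes("EPSG", PJType.GEOGRAPHIC_2D_CRS)
print(len(codes), codes[:5])                      # number of codes and a small sample
# ------------------------------------------------------------------------------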
""" cdef PJ_TYPE cpj_type = get_pj_type(pj_type) cdef PROJ_STRING_LIST proj_code_list = NULL cdef PJ_CONTEXT* context = pyproj_context_create() cdef const char* c_auth_name = NULL b_auth_name = cstrencode(auth_name) c_auth_name = b_auth_name with nogil: proj_code_list = proj_get_codes_from_database( context, c_auth_name, cpj_type, allow_deprecated, ) if proj_code_list == NULL: return [] cdef int iii = 0 try: code_list = [] while proj_code_list[iii] != NULL: code_list.append(proj_code_list[iii]) iii += 1 finally: proj_string_list_destroy(proj_code_list) return code_list CRSInfo = namedtuple( "CRSInfo", [ "auth_name", "code", "name", "type", "deprecated", "area_of_use", "projection_method_name", ], ) CRSInfo.__doc__ = """ .. versionadded:: 3.0.0 CRS Information Parameters ---------- auth_name: str Authority name. code: str Object code. name: str Object name. type: PJType The type of CRS deprecated: bool Whether the object is deprecated. area_of_use: AreaOfUse | None The area of use for the CRS if valid. projection_method_name: str | None Name of the projection method for a projected CRS. """ def query_crs_info( str auth_name=None, pj_types=None, area_of_interest=None, bint contains=False, bint allow_deprecated=False, ): """ .. versionadded:: 3.0.0 Query for CRS information from the PROJ database. See: :c:func:`proj_get_crs_info_list_from_database` Parameters ---------- auth_name: str, optional The name of the authority. Default is all authorities. pj_types: pyproj.enums.PJType | list[pyproj.enums.PJType | None, optional The type(s) of CRS to get the information (i.e. the types with CRS in the name). If None is provided, it will use all of types (i.e. PJType.CRS). area_of_interest: AreaOfInterest, optional Filter returned CRS by the area of interest. Default method is intersection. contains: bool, default=False Only works if the area of interest is passed in. If True, then only CRS whose area of use entirely contains the specified bounding box will be returned. If False, then only CRS whose area of use intersects the specified bounding box will be returned. allow_deprecated: bool, default=False Allow a deprecated code in the return. Returns ------- list[CRSInfo]: CRS information from the PROJ database. 
""" cdef PJ_TYPE *pj_type_list = NULL cdef PROJ_CRS_LIST_PARAMETERS *query_params = NULL cdef PROJ_CRS_INFO **crs_info_list = NULL cdef const char* c_auth_name = NULL cdef int result_count = 0 cdef int pj_type_count = 0 cdef int iii = 0 cdef bytes b_auth_name cdef PJ_CONTEXT* context = pyproj_context_create() if auth_name is not None: b_auth_name = cstrencode(auth_name) c_auth_name = b_auth_name try: if pj_types is not None: if isinstance(pj_types, (PJType, str)): pj_types = (pj_types,) pj_type_count = len(pj_types) pj_type_list = malloc( pj_type_count * sizeof(PJ_TYPE) ) for iii in range(pj_type_count): pj_type_list[iii] = get_pj_type(pj_types[iii]) query_params = proj_get_crs_list_parameters_create() query_params.types = pj_type_list query_params.typesCount = pj_type_count query_params.allow_deprecated = bool(allow_deprecated) if area_of_interest: query_params.crs_area_of_use_contains_bbox = bool(contains) query_params.bbox_valid = True query_params.west_lon_degree = area_of_interest.west_lon_degree query_params.south_lat_degree = area_of_interest.south_lat_degree query_params.east_lon_degree = area_of_interest.east_lon_degree query_params.north_lat_degree = area_of_interest.north_lat_degree with nogil: crs_info_list = proj_get_crs_info_list_from_database( context, c_auth_name, query_params, &result_count) finally: if query_params != NULL: proj_get_crs_list_parameters_destroy(query_params) if pj_type_list != NULL: free(pj_type_list) if crs_info_list == NULL: return [] try: code_list = [] iii = 0 while crs_info_list[iii] != NULL: area_of_use = None if crs_info_list[iii].bbox_valid: area_of_use = AreaOfUse( west=crs_info_list[iii].west_lon_degree, south=crs_info_list[iii].south_lat_degree, east=crs_info_list[iii].east_lon_degree, north=crs_info_list[iii].north_lat_degree, name=cstrdecode(crs_info_list[iii].area_name), ) code_list.append(CRSInfo( auth_name=crs_info_list[iii].auth_name, code=crs_info_list[iii].code, name=crs_info_list[iii].name, type=_INV_PJ_TYPE_MAP[crs_info_list[iii].type], deprecated=bool(crs_info_list[iii].deprecated), area_of_use=area_of_use, projection_method_name=cstrdecode( crs_info_list[iii].projection_method_name ) )) iii += 1 finally: proj_crs_info_list_destroy(crs_info_list) return code_list def query_utm_crs_info( str datum_name=None, area_of_interest=None, bint contains=False, ): """ .. versionadded:: 3.0.0 Query for EPSG UTM CRS information from the PROJ database. See: :c:func:`proj_get_crs_info_list_from_database` Parameters ---------- datum_name: str, optional The name of the datum in the CRS name ('NAD27', 'NAD83', 'WGS 84', ...). area_of_interest: AreaOfInterest, optional Filter returned CRS by the area of interest. Default method is intersection. contains: bool, default=False Only works if the area of interest is passed in. If True, then only CRS whose area of use entirely contains the specified bounding box will be returned. If False, then only CRS whose area of use intersects the specified bounding box will be returned. Returns ------- list[CRSInfo]: UTM CRS information from the PROJ database. 
""" projected_crs = query_crs_info( auth_name="EPSG", pj_types=PJType.PROJECTED_CRS, area_of_interest=area_of_interest, contains=contains, ) utm_crs = [crs for crs in projected_crs if "UTM zone" in crs.name] if datum_name is None: return utm_crs datum_name = datum_name.replace(" ", "") return [ crs for crs in utm_crs if datum_name == crs.name.split("/")[0].replace(" ", "") ] Unit = namedtuple( "Unit", [ "auth_name", "code", "name", "category", "conv_factor", "proj_short_name", "deprecated", ], ) Unit.__doc__ = """ .. versionadded:: 3.0.0 Parameters ---------- auth_name: str Authority name. code: str Object code. name: str Object name. For example "metre", "US survey foot", etc. category: str Category of the unit: one of "linear", "linear_per_time", "angular", "angular_per_time", "scale", "scale_per_time" or "time". conv_factor: float Conversion factor to apply to transform from that unit to the corresponding SI unit (metre for "linear", radian for "angular", etc.). It might be 0 in some cases to indicate no known conversion factor. proj_short_name: str, optional PROJ short name, like "m", "ft", "us-ft", etc... Might be None. deprecated: bool Whether the object is deprecated. """ def get_units_map(str auth_name=None, str category=None, bint allow_deprecated=False): """ .. versionadded:: 2.2.0 .. versionadded:: 3.0.0 query PROJ database. Get the units available in the PROJ database. See: :c:func:`proj_get_units_from_database` Parameters ---------- auth_name: str, optional The authority name to filter by (e.g. EPSG, PROJ). Default is all. category: str, optional Category of the unit: one of "linear", "linear_per_time", "angular", "angular_per_time", "scale", "scale_per_time" or "time". Default is all. allow_deprecated: bool, default=False Whether or not to allow deprecated units. Returns ------- dict[str, Unit] """ cdef const char* c_auth_name = NULL cdef const char* c_category = NULL cdef bytes b_auth_name cdef bytes b_category if auth_name is not None: b_auth_name = cstrencode(auth_name) c_auth_name = b_auth_name if category is not None: b_category = cstrencode(category) c_category = b_category cdef int num_units = 0 cdef PJ_CONTEXT* context = pyproj_context_create() cdef PROJ_UNIT_INFO** db_unit_list = NULL with nogil: db_unit_list = proj_get_units_from_database( context, c_auth_name, c_category, bool(allow_deprecated), &num_units, ) units_map = {} try: for iii in range(num_units): proj_short_name = None if db_unit_list[iii].proj_short_name != NULL: proj_short_name = db_unit_list[iii].proj_short_name name = db_unit_list[iii].name units_map[name] = Unit( auth_name=db_unit_list[iii].auth_name, code=db_unit_list[iii].code, name=name, category=db_unit_list[iii].category, conv_factor=db_unit_list[iii].conv_factor, proj_short_name=proj_short_name, deprecated=bool(db_unit_list[iii].deprecated), ) finally: proj_unit_list_destroy(db_unit_list) return units_map def get_database_metadata(str key not None): """ Return metadata from the database. See: :c:func:`proj_context_get_database_metadata` Available keys: - DATABASE.LAYOUT.VERSION.MAJOR - DATABASE.LAYOUT.VERSION.MINOR - EPSG.VERSION - EPSG.DATE - ESRI.VERSION - ESRI.DATE - IGNF.SOURCE - IGNF.VERSION - IGNF.DATE - NKG.SOURCE - NKG.VERSION - NKG.DATE - PROJ.VERSION - PROJ_DATA.VERSION : PROJ-data version most compatible with this database. Parameters ---------- key: str The name of the metadata item to get data for. Returns ------- str | None: The metatada information if available. 
""" cdef const char* metadata = NULL metadata = proj_context_get_database_metadata( pyproj_context_create(), cstrencode(key), ) if metadata == NULL: return None return metadata pyproj-3.7.1/pyproj/datadir.py000066400000000000000000000073461475425760300164010ustar00rootroot00000000000000""" Module for managing the PROJ data directory. """ # pylint: disable=global-statement import os import shutil import sys from pathlib import Path from pyproj._context import ( # noqa: F401 pylint: disable=unused-import _set_context_data_dir, get_user_data_dir, ) from pyproj.exceptions import DataDirError _USER_PROJ_DATA = None _VALIDATED_PROJ_DATA = None def set_data_dir(proj_data_dir: str | Path) -> None: """ Set the data directory for PROJ to use. Parameters ---------- proj_data_dir: str | Path The path to the PROJ data directory. """ global _USER_PROJ_DATA global _VALIDATED_PROJ_DATA _USER_PROJ_DATA = str(proj_data_dir) # set to none to re-validate _VALIDATED_PROJ_DATA = None # need to reset the global PROJ context # to prevent core dumping if the data directory # is not found. _set_context_data_dir() def append_data_dir(proj_data_dir: str | Path) -> None: """ Add an additional data directory for PROJ to use. Parameters ---------- proj_data_dir: str | Path The path to the PROJ data directory. """ set_data_dir(os.pathsep.join([get_data_dir(), str(proj_data_dir)])) def get_data_dir() -> str: """ The order of preference for the data directory is: 1. The one set by pyproj.datadir.set_data_dir (if exists & valid) 2. The internal proj directory (if exists & valid) 3. The directory in PROJ_DATA (PROJ 9.1+) | PROJ_LIB (PROJ<9.1) (if exists & valid) 4. The directory on sys.prefix (if exists & valid) 5. The directory on the PATH (if exists & valid) Returns ------- str: The valid data directory. """ # to avoid re-validating global _VALIDATED_PROJ_DATA if _VALIDATED_PROJ_DATA is not None: return _VALIDATED_PROJ_DATA internal_datadir = Path(__file__).absolute().parent / "proj_dir" / "share" / "proj" proj_lib_dirs = os.environ.get("PROJ_DATA", os.environ.get("PROJ_LIB", "")) prefix_datadir = Path(sys.prefix, "share", "proj") conda_windows_prefix_datadir = Path(sys.prefix, "Library", "share", "proj") def valid_data_dir(potential_data_dir): if ( potential_data_dir is not None and Path(potential_data_dir, "proj.db").exists() ): return True return False def valid_data_dirs(potential_data_dirs): if potential_data_dirs is None: return False for proj_data_dir in potential_data_dirs.split(os.pathsep): if valid_data_dir(proj_data_dir): return True return None if valid_data_dirs(_USER_PROJ_DATA): _VALIDATED_PROJ_DATA = _USER_PROJ_DATA elif valid_data_dir(internal_datadir): _VALIDATED_PROJ_DATA = str(internal_datadir) elif valid_data_dirs(proj_lib_dirs): _VALIDATED_PROJ_DATA = proj_lib_dirs elif valid_data_dir(prefix_datadir): _VALIDATED_PROJ_DATA = str(prefix_datadir) elif valid_data_dir(conda_windows_prefix_datadir): _VALIDATED_PROJ_DATA = str(conda_windows_prefix_datadir) else: proj_exe = shutil.which("proj", path=sys.prefix) if proj_exe is None: proj_exe = shutil.which("proj") if proj_exe is not None: system_proj_dir = Path(proj_exe).parent.parent / "share" / "proj" if valid_data_dir(system_proj_dir): _VALIDATED_PROJ_DATA = str(system_proj_dir) if _VALIDATED_PROJ_DATA is None: raise DataDirError( "Valid PROJ data directory not found. " "Either set the path using the environmental variable " "PROJ_DATA (PROJ 9.1+) | PROJ_LIB (PROJ<9.1) or " "with `pyproj.datadir.set_data_dir`." 
) return _VALIDATED_PROJ_DATA pyproj-3.7.1/pyproj/enums.py000066400000000000000000000077741475425760300161250ustar00rootroot00000000000000""" This module contains enumerations used in pyproj. """ from enum import Enum, IntFlag class BaseEnum(Enum): """ Base enumeration class that handles input as strings ignoring case. """ @classmethod def create(cls, item): """ Handles finding the enumeration ignoring case if provided as a string. """ try: return cls(item) except ValueError: pass if isinstance(item, str): item = item.upper() for member in cls: if member.value == item: return member raise ValueError( f"Invalid value supplied '{item}'. " f"Only {tuple(version.value for version in cls)} are supported." ) class WktVersion(BaseEnum): """ .. versionadded:: 2.2.0 Supported CRS WKT string versions See: :c:enum:`PJ_WKT_TYPE` """ #: WKT Version 2 from 2015 WKT2_2015 = "WKT2_2015" #: WKT Version 2 from 2015 Simplified WKT2_2015_SIMPLIFIED = "WKT2_2015_SIMPLIFIED" #: Deprecated alias for WKT Version 2 from 2019 WKT2_2018 = "WKT2_2018" #: Deprecated alias for WKT Version 2 from 2019 Simplified WKT2_2018_SIMPLIFIED = "WKT2_2018_SIMPLIFIED" #: WKT Version 2 from 2019 WKT2_2019 = "WKT2_2019" #: WKT Version 2 from 2019 Simplified WKT2_2019_SIMPLIFIED = "WKT2_2019_SIMPLIFIED" #: WKT Version 1 GDAL Style WKT1_GDAL = "WKT1_GDAL" #: WKT Version 1 ESRI Style WKT1_ESRI = "WKT1_ESRI" class ProjVersion(BaseEnum): """ .. versionadded:: 2.2.0 Supported CRS PROJ string versions """ #: PROJ String version 4 PROJ_4 = 4 #: PROJ String version 5 PROJ_5 = 5 class TransformDirection(BaseEnum): """ .. versionadded:: 2.2.0 Supported transform directions """ #: Forward direction FORWARD = "FORWARD" #: Inverse direction INVERSE = "INVERSE" #: Do nothing IDENT = "IDENT" class PJType(BaseEnum): """ .. versionadded:: 2.4.0 PJ Types for listing codes with :func:`pyproj.get_codes` See: :c:enum:`PJ_TYPE` Attributes ---------- UNKNOWN ELLIPSOID PRIME_MERIDIAN GEODETIC_REFERENCE_FRAME DYNAMIC_GEODETIC_REFERENCE_FRAME VERTICAL_REFERENCE_FRAME DYNAMIC_VERTICAL_REFERENCE_FRAME DATUM_ENSEMBLE CRS GEODETIC_CRS GEOCENTRIC_CRS GEOGRAPHIC_CRS GEOGRAPHIC_2D_CRS GEOGRAPHIC_3D_CRS VERTICAL_CRS PROJECTED_CRS COMPOUND_CRS TEMPORAL_CRS ENGINEERING_CRS BOUND_CRS OTHER_CRS CONVERSION TRANSFORMATION CONCATENATED_OPERATION OTHER_COORDINATE_OPERATION """ UNKNOWN = "UNKNOWN" ELLIPSOID = "ELLIPSOID" PRIME_MERIDIAN = "PRIME_MERIDIAN" GEODETIC_REFERENCE_FRAME = "GEODETIC_REFERENCE_FRAME" DYNAMIC_GEODETIC_REFERENCE_FRAME = "DYNAMIC_GEODETIC_REFERENCE_FRAME" VERTICAL_REFERENCE_FRAME = "VERTICAL_REFERENCE_FRAME" DYNAMIC_VERTICAL_REFERENCE_FRAME = "DYNAMIC_VERTICAL_REFERENCE_FRAME" DATUM_ENSEMBLE = "DATUM_ENSEMBLE" CRS = "CRS" GEODETIC_CRS = "GEODETIC_CRS" GEOCENTRIC_CRS = "GEOCENTRIC_CRS" GEOGRAPHIC_CRS = "GEOGRAPHIC_CRS" GEOGRAPHIC_2D_CRS = "GEOGRAPHIC_2D_CRS" GEOGRAPHIC_3D_CRS = "GEOGRAPHIC_3D_CRS" VERTICAL_CRS = "VERTICAL_CRS" PROJECTED_CRS = "PROJECTED_CRS" DERIVED_PROJECTED_CRS = "DERIVED_PROJECTED_CRS" COMPOUND_CRS = "COMPOUND_CRS" TEMPORAL_CRS = "TEMPORAL_CRS" ENGINEERING_CRS = "ENGINEERING_CRS" BOUND_CRS = "BOUND_CRS" OTHER_CRS = "OTHER_CRS" CONVERSION = "CONVERSION" TRANSFORMATION = "TRANSFORMATION" CONCATENATED_OPERATION = "CONCATENATED_OPERATION" OTHER_COORDINATE_OPERATION = "OTHER_COORDINATE_OPERATION" class GeodIntermediateFlag(IntFlag): """ .. 
versionadded:: 3.1.0 Flags to be used in Geod.[inv|fwd]_intermediate() """ DEFAULT = 0x0 NPTS_ROUND = 0x0 NPTS_CEIL = 0x1 NPTS_TRUNC = 0x2 DEL_S_RECALC = 0x00 DEL_S_NO_RECALC = 0x10 AZIS_DISCARD = 0x000 AZIS_KEEP = 0x100 pyproj-3.7.1/pyproj/exceptions.py000066400000000000000000000012661475425760300171450ustar00rootroot00000000000000""" Exceptions for pyproj """ from pyproj._context import _clear_proj_error, _get_proj_error class ProjError(RuntimeError): """Raised when a Proj error occurs.""" def __init__(self, error_message: str) -> None: proj_error = _get_proj_error() if proj_error is not None: error_message = f"{error_message}: (Internal Proj Error: {proj_error})" _clear_proj_error() super().__init__(error_message) class CRSError(ProjError): """Raised when a CRS error occurs.""" class GeodError(RuntimeError): """Raised when a Geod error occurs.""" class DataDirError(RuntimeError): """Raised when a the data directory was not found.""" pyproj-3.7.1/pyproj/geod.py000066400000000000000000001252171475425760300157050ustar00rootroot00000000000000""" The Geod class can perform forward and inverse geodetic, or Great Circle, computations. The forward computation involves determining latitude, longitude and back azimuth of a terminus point given the latitude and longitude of an initial point, plus azimuth and distance. The inverse computation involves determining the forward and back azimuths and distance given the latitudes and longitudes of an initial and terminus point. """ __all__ = [ "Geod", "pj_ellps", "geodesic_version_str", "GeodIntermediateFlag", "GeodIntermediateReturn", "reverse_azimuth", ] import math import warnings from typing import Any from pyproj._geod import Geod as _Geod from pyproj._geod import GeodIntermediateReturn, geodesic_version_str from pyproj._geod import reverse_azimuth as _reverse_azimuth from pyproj.enums import GeodIntermediateFlag from pyproj.exceptions import GeodError from pyproj.list import get_ellps_map from pyproj.utils import DataType, _convertback, _copytobuffer pj_ellps = get_ellps_map() def _params_from_ellps_map(ellps: str) -> tuple[float, float, float, float, bool]: """ Build Geodesic parameters from PROJ ellips map Parameter --------- ellps: str The name of the ellipse in the map. Returns ------- tuple[float, float, float, float, bool] """ ellps_dict = pj_ellps[ellps] semi_major_axis, semi_minor_axis, flattening, eccentricity_squared = ( _params_from_kwargs(ellps_dict) ) sphere = False if ellps_dict["description"] == "Normal Sphere": sphere = True return semi_major_axis, semi_minor_axis, flattening, eccentricity_squared, sphere def _params_from_kwargs(kwargs: dict) -> tuple[float, float, float, float]: """ Build Geodesic parameters from input kwargs: - a: the semi-major axis (required). Need least one of these parameters. - b: the semi-minor axis - rf: the reciprocal flattening - f: flattening - es: eccentricity squared Parameter --------- kwargs: dict The input kwargs for an ellipse. 
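# --- Numeric sanity check (not part of the pyproj source) --------------------
# Reproduces the relationships used by _params_from_kwargs above for the
# semi-major axis / flattening form: b = a * (1 - f), f = (a - b) / a and
# es = 1 - b**2 / a**2. The WGS 84 constants are standard reference values.
a = 6378137.0              # WGS 84 semi-major axis (metres)
rf = 298.257223563         # WGS 84 reciprocal flattening
f = 1.0 / rf
b = a * (1.0 - f)          # semi-minor axis derived from the flattening
es = 1.0 - b**2 / a**2     # eccentricity squared
assert abs(f - (a - b) / a) < 1e-15
print(f"{b:.4f} {es:.12f}")  # ~6356752.3142  ~0.006694379990
# ------------------------------------------------------------------------------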
Returns ------- tuple[float, float, float, float] """ semi_major_axis = kwargs["a"] if "b" in kwargs: semi_minor_axis = kwargs["b"] eccentricity_squared = 1.0 - semi_minor_axis**2 / semi_major_axis**2 flattening = (semi_major_axis - semi_minor_axis) / semi_major_axis elif "rf" in kwargs: flattening = 1.0 / kwargs["rf"] semi_minor_axis = semi_major_axis * (1.0 - flattening) eccentricity_squared = 1.0 - semi_minor_axis**2 / semi_major_axis**2 elif "f" in kwargs: flattening = kwargs["f"] semi_minor_axis = semi_major_axis * (1.0 - flattening) eccentricity_squared = 1.0 - (semi_minor_axis / semi_major_axis) ** 2 elif "es" in kwargs: eccentricity_squared = kwargs["es"] semi_minor_axis = math.sqrt( semi_major_axis**2 - eccentricity_squared * semi_major_axis**2 ) flattening = (semi_major_axis - semi_minor_axis) / semi_major_axis elif "e" in kwargs: eccentricity_squared = kwargs["e"] ** 2 semi_minor_axis = math.sqrt( semi_major_axis**2 - eccentricity_squared * semi_major_axis**2 ) flattening = (semi_major_axis - semi_minor_axis) / semi_major_axis else: semi_minor_axis = semi_major_axis flattening = 0.0 eccentricity_squared = 0.0 return semi_major_axis, semi_minor_axis, flattening, eccentricity_squared class Geod(_Geod): """ performs forward and inverse geodetic, or Great Circle, computations. The forward computation (using the 'fwd' method) involves determining latitude, longitude and back azimuth of a terminus point given the latitude and longitude of an initial point, plus azimuth and distance. The inverse computation (using the 'inv' method) involves determining the forward and back azimuths and distance given the latitudes and longitudes of an initial and terminus point. Attributes ---------- initstring: str The string form of the user input used to create the Geod. sphere: bool If True, it is a sphere. a: float The ellipsoid equatorial radius, or semi-major axis. b: float The ellipsoid polar radius, or semi-minor axis. es: float The 'eccentricity' of the ellipse, squared (1-b2/a2). f: float The ellipsoid 'flattening' parameter ( (a-b)/a ). """ def __init__(self, initstring: str | None = None, **kwargs) -> None: """ initialize a Geod class instance. Geodetic parameters for specifying the ellipsoid can be given in a dictionary 'initparams', as keyword arguments, or as as proj geod initialization string. You can get a dictionary of ellipsoids using :func:`pyproj.get_ellps_map` or with the variable `pyproj.pj_ellps`. The parameters of the ellipsoid may also be set directly using the 'a' (semi-major or equatorial axis radius) keyword, and any one of the following keywords: 'b' (semi-minor, or polar axis radius), 'e' (eccentricity), 'es' (eccentricity squared), 'f' (flattening), or 'rf' (reciprocal flattening). See the proj documentation (https://proj.org) for more information about specifying ellipsoid parameters. Example usage: >>> from pyproj import Geod >>> g = Geod(ellps='clrk66') # Use Clarke 1866 ellipsoid. >>> # specify the lat/lons of some cities. >>> boston_lat = 42.+(15./60.); boston_lon = -71.-(7./60.) >>> portland_lat = 45.+(31./60.); portland_lon = -123.-(41./60.) >>> newyork_lat = 40.+(47./60.); newyork_lon = -73.-(58./60.) >>> london_lat = 51.+(32./60.); london_lon = -(5./60.) >>> # compute forward and back azimuths, plus distance >>> # between Boston and Portland. 
>>> az12,az21,dist = g.inv(boston_lon,boston_lat,portland_lon,portland_lat) >>> f"{az12:.3f} {az21:.3f} {dist:.3f}" '-66.531 75.654 4164192.708' >>> # compute latitude, longitude and back azimuth of Portland, >>> # given Boston lat/lon, forward azimuth and distance to Portland. >>> endlon, endlat, backaz = g.fwd(boston_lon, boston_lat, az12, dist) >>> f"{endlat:.3f} {endlon:.3f} {backaz:.3f}" '45.517 -123.683 75.654' >>> # compute the azimuths, distances from New York to several >>> # cities (pass a list) >>> lons1 = 3*[newyork_lon]; lats1 = 3*[newyork_lat] >>> lons2 = [boston_lon, portland_lon, london_lon] >>> lats2 = [boston_lat, portland_lat, london_lat] >>> az12,az21,dist = g.inv(lons1,lats1,lons2,lats2) >>> for faz, baz, d in list(zip(az12,az21,dist)): ... f"{faz:7.3f} {baz:8.3f} {d:12.3f}" ' 54.663 -123.448 288303.720' '-65.463 79.342 4013037.318' ' 51.254 -71.576 5579916.651' >>> g2 = Geod('+ellps=clrk66') # use proj4 style initialization string >>> az12,az21,dist = g2.inv(boston_lon,boston_lat,portland_lon,portland_lat) >>> f"{az12:.3f} {az21:.3f} {dist:.3f}" '-66.531 75.654 4164192.708' """ # if initparams is a proj-type init string, # convert to dict. ellpsd: dict[str, str | float] = {} if initstring is not None: for kvpair in initstring.split(): # Actually only +a and +b are needed # We can ignore safely any parameter that doesn't have a value if kvpair.find("=") == -1: continue key, val = kvpair.split("=") key = key.lstrip("+") if key in ["a", "b", "rf", "f", "es", "e"]: ellpsd[key] = float(val) else: ellpsd[key] = val # merge this dict with kwargs dict. kwargs = dict(list(kwargs.items()) + list(ellpsd.items())) sphere = False if "ellps" in kwargs: ( semi_major_axis, semi_minor_axis, flattening, eccentricity_squared, sphere, ) = _params_from_ellps_map(kwargs["ellps"]) else: ( semi_major_axis, semi_minor_axis, flattening, eccentricity_squared, ) = _params_from_kwargs(kwargs) if math.fabs(flattening) < 1.0e-8: sphere = True super().__init__( semi_major_axis, flattening, sphere, semi_minor_axis, eccentricity_squared ) def fwd( # pylint: disable=invalid-name self, lons: Any, lats: Any, az: Any, dist: Any, radians: bool = False, inplace: bool = False, return_back_azimuth: bool = True, ) -> tuple[Any, Any, Any]: """ Forward transformation Determine longitudes, latitudes and back azimuths of terminus points given longitudes and latitudes of initial points, plus forward azimuths and distances. .. versionadded:: 3.5.0 inplace .. versionadded:: 3.5.0 return_back_azimuth Accepted numeric scalar or array: - :class:`int` - :class:`float` - :class:`numpy.floating` - :class:`numpy.integer` - :class:`list` - :class:`tuple` - :class:`array.array` - :class:`numpy.ndarray` - :class:`xarray.DataArray` - :class:`pandas.Series` Parameters ---------- lons: scalar or array Longitude(s) of initial point(s) lats: scalar or array Latitude(s) of initial point(s) az: scalar or array Forward azimuth(s) dist: scalar or array Distance(s) between initial and terminus point(s) in meters radians: bool, default=False If True, the input data is assumed to be in radians. Otherwise, the data is assumed to be in degrees. inplace: bool, default=False If True, will attempt to write the results to the input array instead of returning a new array. This will fail if the input is not an array in C order with the double data type. return_back_azimuth: bool, default=True If True, the third return value will be the back azimuth, Otherwise, it will be the forward azimuth. 
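# --- Illustrative usage sketch (not part of the pyproj source) ---------------
# Demonstrates the array path of Geod.fwd described above, with inplace=True so
# the results are written back into the input arrays. The points and azimuths
# are arbitrary examples.
import numpy as np
from pyproj import Geod

geod = Geod(ellps="WGS84")
lons = np.array([-71.12, 2.35], dtype=np.float64)
lats = np.array([42.36, 48.86], dtype=np.float64)
azis = np.array([45.0, 90.0], dtype=np.float64)
dists = np.array([10_000.0, 10_000.0], dtype=np.float64)

end_lons, end_lats, back_azis = geod.fwd(lons, lats, azis, dists, inplace=True)
# lons and lats were updated in place and now hold the terminus coordinates.
print(np.round(lons, 4), np.round(lats, 4), np.round(back_azis, 2))
# ------------------------------------------------------------------------------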
Returns ------- scalar or array: Longitude(s) of terminus point(s) scalar or array: Latitude(s) of terminus point(s) scalar or array: Back azimuth(s) or Forward azimuth(s) """ try: # Fast-path for scalar input, will raise if invalid types are input # and we can fallback below return self._fwd_point( lons, lats, az, dist, radians=radians, return_back_azimuth=return_back_azimuth, ) except TypeError: pass # process inputs, making copies that support buffer API. inx, x_data_type = _copytobuffer(lons, inplace=inplace) iny, y_data_type = _copytobuffer(lats, inplace=inplace) inz, z_data_type = _copytobuffer(az, inplace=inplace) ind = _copytobuffer(dist, inplace=inplace)[0] self._fwd( inx, iny, inz, ind, radians=radians, return_back_azimuth=return_back_azimuth ) # if inputs were lists, tuples or floats, convert back. outx = _convertback(x_data_type, inx) outy = _convertback(y_data_type, iny) outz = _convertback(z_data_type, inz) return outx, outy, outz def inv( self, lons1: Any, lats1: Any, lons2: Any, lats2: Any, radians: bool = False, inplace: bool = False, return_back_azimuth: bool = True, ) -> tuple[Any, Any, Any]: """ Inverse transformation Determine forward and back azimuths, plus distances between initial points and terminus points. .. versionadded:: 3.5.0 inplace .. versionadded:: 3.5.0 return_back_azimuth Accepted numeric scalar or array: - :class:`int` - :class:`float` - :class:`numpy.floating` - :class:`numpy.integer` - :class:`list` - :class:`tuple` - :class:`array.array` - :class:`numpy.ndarray` - :class:`xarray.DataArray` - :class:`pandas.Series` Parameters ---------- lons1: scalar or array Longitude(s) of initial point(s) lats1: scalar or array Latitude(s) of initial point(s) lons2: scalar or array Longitude(s) of terminus point(s) lats2: scalar or array Latitude(s) of terminus point(s) radians: bool, default=False If True, the input data is assumed to be in radians. Otherwise, the data is assumed to be in degrees. inplace: bool, default=False If True, will attempt to write the results to the input array instead of returning a new array. This will fail if the input is not an array in C order with the double data type. return_back_azimuth: bool, default=True If True, the second return value (azi21) will be the back azimuth (flipped 180 degrees), Otherwise, it will also be a forward azimuth. Returns ------- scalar or array: Forward azimuth(s) (azi12) scalar or array: Back azimuth(s) or Forward azimuth(s) (azi21) scalar or array: Distance(s) between initial and terminus point(s) in meters """ try: # Fast-path for scalar input, will raise if invalid types are input # and we can fallback below return self._inv_point( lons1, lats1, lons2, lats2, radians=radians, return_back_azimuth=return_back_azimuth, ) except TypeError: pass # process inputs, making copies that support buffer API. inx, x_data_type = _copytobuffer(lons1, inplace=inplace) iny, y_data_type = _copytobuffer(lats1, inplace=inplace) inz, z_data_type = _copytobuffer(lons2, inplace=inplace) ind = _copytobuffer(lats2, inplace=inplace)[0] self._inv( inx, iny, inz, ind, radians=radians, return_back_azimuth=return_back_azimuth ) # if inputs were lists, tuples or floats, convert back. outx = _convertback(x_data_type, inx) outy = _convertback(y_data_type, iny) outz = _convertback(z_data_type, inz) return outx, outy, outz def npts( self, lon1: float, lat1: float, lon2: float, lat2: float, npts: int, radians: bool = False, initial_idx: int = 1, terminus_idx: int = 1, ) -> list: """ .. 
versionadded:: 3.1.0 initial_idx, terminus_idx Given a single initial point and terminus point, returns a list of longitude/latitude pairs describing npts equally spaced intermediate points along the geodesic between the initial and terminus points. Similar to inv_intermediate(), but with less options. Example usage: >>> from pyproj import Geod >>> g = Geod(ellps='clrk66') # Use Clarke 1866 ellipsoid. >>> # specify the lat/lons of Boston and Portland. >>> boston_lat = 42.+(15./60.); boston_lon = -71.-(7./60.) >>> portland_lat = 45.+(31./60.); portland_lon = -123.-(41./60.) >>> # find ten equally spaced points between Boston and Portland. >>> lonlats = g.npts(boston_lon,boston_lat,portland_lon,portland_lat,10) >>> for lon,lat in lonlats: f'{lat:.3f} {lon:.3f}' '43.528 -75.414' '44.637 -79.883' '45.565 -84.512' '46.299 -89.279' '46.830 -94.156' '47.149 -99.112' '47.251 -104.106' '47.136 -109.100' '46.805 -114.051' '46.262 -118.924' >>> # test with radians=True (inputs/outputs in radians, not degrees) >>> import math >>> dg2rad = math.radians(1.) >>> rad2dg = math.degrees(1.) >>> lonlats = g.npts( ... dg2rad*boston_lon, ... dg2rad*boston_lat, ... dg2rad*portland_lon, ... dg2rad*portland_lat, ... 10, ... radians=True ... ) >>> for lon,lat in lonlats: f'{rad2dg*lat:.3f} {rad2dg*lon:.3f}' '43.528 -75.414' '44.637 -79.883' '45.565 -84.512' '46.299 -89.279' '46.830 -94.156' '47.149 -99.112' '47.251 -104.106' '47.136 -109.100' '46.805 -114.051' '46.262 -118.924' Parameters ---------- lon1: float Longitude of the initial point lat1: float Latitude of the initial point lon2: float Longitude of the terminus point lat2: float Latitude of the terminus point npts: int Number of points to be returned (including initial and/or terminus points, if required) radians: bool, default=False If True, the input data is assumed to be in radians. Otherwise, the data is assumed to be in degrees. initial_idx: int, default=1 if initial_idx==0 then the initial point would be included in the output (as the first point) terminus_idx: int, default=1 if terminus_idx==0 then the terminus point would be included in the output (as the last point) Returns ------- list of tuples: list of (lon, lat) points along the geodesic between the initial and terminus points. """ res = self._inv_or_fwd_intermediate( lon1=lon1, lat1=lat1, lon2_or_azi1=lon2, lat2=lat2, npts=npts, del_s=0, radians=radians, initial_idx=initial_idx, terminus_idx=terminus_idx, flags=GeodIntermediateFlag.AZIS_DISCARD, out_lons=None, out_lats=None, out_azis=None, return_back_azimuth=False, is_fwd=False, ) return list(zip(res.lons, res.lats)) def inv_intermediate( self, lon1: float, lat1: float, lon2: float, lat2: float, npts: int = 0, del_s: float = 0, initial_idx: int = 1, terminus_idx: int = 1, radians: bool = False, flags: GeodIntermediateFlag = GeodIntermediateFlag.DEFAULT, out_lons: Any | None = None, out_lats: Any | None = None, out_azis: Any | None = None, return_back_azimuth: bool | None = None, ) -> GeodIntermediateReturn: """ .. versionadded:: 3.1.0 .. versionadded:: 3.5.0 return_back_azimuth Given a single initial point and terminus point, and the number of points, returns a list of longitude/latitude pairs describing npts equally spaced intermediate points along the geodesic between the initial and terminus points. 
npts and del_s parameters are mutually exclusive: if npts != 0: it calculates the distance between the points by the distance between the initial point and the terminus point divided by npts (the number of intermediate points) else: it calculates the number of intermediate points by dividing the distance between the initial and terminus points by del_s (delimiter distance between two successive points) Similar to npts(), but with more options. Example usage: >>> from pyproj import Geod >>> g = Geod(ellps='clrk66') # Use Clarke 1866 ellipsoid. >>> # specify the lat/lons of Boston and Portland. >>> boston_lat = 42.+(15./60.); boston_lon = -71.-(7./60.) >>> portland_lat = 45.+(31./60.); portland_lon = -123.-(41./60.) >>> # find ten equally spaced points between Boston and Portland. >>> r = g.inv_intermediate(boston_lon,boston_lat,portland_lon,portland_lat,10) >>> for lon,lat in zip(r.lons, r.lats): f'{lat:.3f} {lon:.3f}' '43.528 -75.414' '44.637 -79.883' '45.565 -84.512' '46.299 -89.279' '46.830 -94.156' '47.149 -99.112' '47.251 -104.106' '47.136 -109.100' '46.805 -114.051' '46.262 -118.924' >>> # test with radians=True (inputs/outputs in radians, not degrees) >>> import math >>> dg2rad = math.radians(1.) >>> rad2dg = math.degrees(1.) >>> r = g.inv_intermediate( ... dg2rad*boston_lon, ... dg2rad*boston_lat, ... dg2rad*portland_lon, ... dg2rad*portland_lat, ... 10, ... radians=True ... ) >>> for lon,lat in zip(r.lons, r.lats): f'{rad2dg*lat:.3f} {rad2dg*lon:.3f}' '43.528 -75.414' '44.637 -79.883' '45.565 -84.512' '46.299 -89.279' '46.830 -94.156' '47.149 -99.112' '47.251 -104.106' '47.136 -109.100' '46.805 -114.051' '46.262 -118.924' Parameters ---------- lon1: float Longitude of the initial point lat1: float Latitude of the initial point lon2: float Longitude of the terminus point lat2: float Latitude of the terminus point npts: int, default=0 Number of points to be returned npts == 0 if del_s != 0 del_s: float, default=0 delimiter distance between two successive points del_s == 0 if npts != 0 radians: bool, default=False If True, the input data is assumed to be in radians. Otherwise, the data is assumed to be in degrees. initial_idx: int, default=1 if initial_idx==0 then the initial point would be included in the output (as the first point) terminus_idx: int, default=1 if terminus_idx==0 then the terminus point would be included in the output (as the last point) flags: GeodIntermediateFlag, default=GeodIntermediateFlag.DEFAULT * 1st - round/ceil/trunc (see ``GeodIntermediateFlag.NPTS_*``) * 2nd - update del_s to the new npts or not (see ``GeodIntermediateFlag.DEL_S_*``) * 3rd - if out_azis=None, indicates if to save or discard the azimuths (see ``GeodIntermediateFlag.AZIS_*``) * default - round npts, update del_s accordingly, discard azis out_lons: array, :class:`numpy.ndarray`, optional Longitude(s) of the intermediate point(s) If None then buffers would be allocated internnaly out_lats: array, :class:`numpy.ndarray`, optional Latitudes(s) of the intermediate point(s) If None then buffers would be allocated internnaly out_azis: array, :class:`numpy.ndarray`, optional az12(s) of the intermediate point(s) If None then buffers would be allocated internnaly unless requested otherwise by the flags return_back_azimuth: bool, default=True if True, out_azis will store the back azimuth, Otherwise, out_azis will store the forward azimuth. 
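# --- Illustrative usage sketch (not part of the pyproj source) ---------------
# Uses the del_s form of inv_intermediate described above: instead of asking for
# a fixed number of points, intermediate points are spaced roughly every 250 km
# along the geodesic. The coordinates are the same Boston/Portland pair used in
# the surrounding doctests.
from pyproj import Geod

g = Geod(ellps="clrk66")
boston_lon, boston_lat = -71.0 - 7.0 / 60.0, 42.0 + 15.0 / 60.0
portland_lon, portland_lat = -123.0 - 41.0 / 60.0, 45.0 + 31.0 / 60.0

res = g.inv_intermediate(
    boston_lon, boston_lat, portland_lon, portland_lat,
    del_s=250_000,                 # metres between successive points; npts stays 0
    return_back_azimuth=False,     # set explicitly to avoid the pyproj 3.5 warning
)
print(res.npts, f"{res.del_s:.1f}")
for lon, lat in list(zip(res.lons, res.lats))[:3]:
    print(f"{lat:.3f} {lon:.3f}")
# ------------------------------------------------------------------------------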
Returns ------- GeodIntermediateReturn: number of points, distance and output arrays (GeodIntermediateReturn docs) """ if return_back_azimuth is None: return_back_azimuth = True warnings.warn( "Back azimuth is being returned by default to be compatible with fwd()" "This is a breaking change for pyproj 3.5+." "To avoid this warning, set return_back_azimuth=True." "Otherwise, to restore old behaviour, set return_back_azimuth=False." "This warning will be removed in future version." ) return super()._inv_or_fwd_intermediate( lon1=lon1, lat1=lat1, lon2_or_azi1=lon2, lat2=lat2, npts=npts, del_s=del_s, radians=radians, initial_idx=initial_idx, terminus_idx=terminus_idx, flags=int(flags), out_lons=out_lons, out_lats=out_lats, out_azis=out_azis, return_back_azimuth=return_back_azimuth, is_fwd=False, ) def fwd_intermediate( self, lon1: float, lat1: float, azi1: float, npts: int, del_s: float, initial_idx: int = 1, terminus_idx: int = 1, radians: bool = False, flags: GeodIntermediateFlag = GeodIntermediateFlag.DEFAULT, out_lons: Any | None = None, out_lats: Any | None = None, out_azis: Any | None = None, return_back_azimuth: bool | None = None, ) -> GeodIntermediateReturn: """ .. versionadded:: 3.1.0 .. versionadded:: 3.5.0 return_back_azimuth Given a single initial point and azimuth, number of points (npts) and delimiter distance between two successive points (del_s), returns a list of longitude/latitude pairs describing npts equally spaced intermediate points along the geodesic between the initial and terminus points. Example usage: >>> from pyproj import Geod >>> g = Geod(ellps='clrk66') # Use Clarke 1866 ellipsoid. >>> # specify the lat/lons of Boston and Portland. >>> boston_lat = 42.+(15./60.); boston_lon = -71.-(7./60.) >>> portland_lat = 45.+(31./60.); portland_lon = -123.-(41./60.) >>> az12,az21,dist = g.inv(boston_lon,boston_lat,portland_lon,portland_lat) >>> # find ten equally spaced points between Boston and Portland. >>> npts = 10 >>> del_s = dist/(npts+1) >>> r = g.fwd_intermediate(boston_lon,boston_lat,az12,npts=npts,del_s=del_s) >>> for lon,lat in zip(r.lons, r.lats): f'{lat:.3f} {lon:.3f}' '43.528 -75.414' '44.637 -79.883' '45.565 -84.512' '46.299 -89.279' '46.830 -94.156' '47.149 -99.112' '47.251 -104.106' '47.136 -109.100' '46.805 -114.051' '46.262 -118.924' >>> # test with radians=True (inputs/outputs in radians, not degrees) >>> import math >>> dg2rad = math.radians(1.) >>> rad2dg = math.degrees(1.) >>> r = g.fwd_intermediate( ... dg2rad*boston_lon, ... dg2rad*boston_lat, ... dg2rad*az12, ... npts=npts, ... del_s=del_s, ... radians=True ... ) >>> for lon,lat in zip(r.lons, r.lats): f'{rad2dg*lat:.3f} {rad2dg*lon:.3f}' '43.528 -75.414' '44.637 -79.883' '45.565 -84.512' '46.299 -89.279' '46.830 -94.156' '47.149 -99.112' '47.251 -104.106' '47.136 -109.100' '46.805 -114.051' '46.262 -118.924' Parameters ---------- lon1: float Longitude of the initial point lat1: float Latitude of the initial point azi1: float Azimuth from the initial point towards the terminus point npts: int Number of points to be returned (including initial and/or terminus points, if required) del_s: float delimiter distance between two successive points radians: bool, default=False If True, the input data is assumed to be in radians. Otherwise, the data is assumed to be in degrees. 
initial_idx: int, default=1 if initial_idx==0 then the initial point would be included in the output (as the first point) terminus_idx: int, default=1 if terminus_idx==0 then the terminus point would be included in the output (as the last point) flags: GeodIntermediateFlag, default=GeodIntermediateFlag.DEFAULT * 1st - round/ceil/trunc (see ``GeodIntermediateFlag.NPTS_*``) * 2nd - update del_s to the new npts or not (see ``GeodIntermediateFlag.DEL_S_*``) * 3rd - if out_azis=None, indicates if to save or discard the azimuths (see ``GeodIntermediateFlag.AZIS_*``) * default - round npts, update del_s accordingly, discard azis out_lons: array, :class:`numpy.ndarray`, optional Longitude(s) of the intermediate point(s) If None then buffers would be allocated internnaly out_lats: array, :class:`numpy.ndarray`, optional Latitudes(s) of the intermediate point(s) If None then buffers would be allocated internnaly out_azis: array, :class:`numpy.ndarray`, optional az12(s) of the intermediate point(s) If None then buffers would be allocated internnaly unless requested otherwise by the flags return_back_azimuth: bool, default=True if True, out_azis will store the back azimuth, Otherwise, out_azis will store the forward azimuth. Returns ------- GeodIntermediateReturn: number of points, distance and output arrays (GeodIntermediateReturn docs) """ if return_back_azimuth is None: return_back_azimuth = True warnings.warn( "Back azimuth is being returned by default to be compatible with inv()" "This is a breaking change for pyproj 3.5+." "To avoid this warning, set return_back_azimuth=True." "Otherwise, to restore old behaviour, set return_back_azimuth=False." "This warning will be removed in future version." ) return super()._inv_or_fwd_intermediate( lon1=lon1, lat1=lat1, lon2_or_azi1=azi1, lat2=math.nan, npts=npts, del_s=del_s, radians=radians, initial_idx=initial_idx, terminus_idx=terminus_idx, flags=int(flags), out_lons=out_lons, out_lats=out_lats, out_azis=out_azis, return_back_azimuth=return_back_azimuth, is_fwd=True, ) def line_length(self, lons: Any, lats: Any, radians: bool = False) -> float: """ .. versionadded:: 2.3.0 Calculate the total distance between points along a line (meters). >>> from pyproj import Geod >>> geod = Geod('+a=6378137 +f=0.0033528106647475126') >>> lats = [-72.9, -71.9, -74.9, -74.3, -77.5, -77.4, -71.7, -65.9, -65.7, ... -66.6, -66.9, -69.8, -70.0, -71.0, -77.3, -77.9, -74.7] >>> lons = [-74, -102, -102, -131, -163, 163, 172, 140, 113, ... 88, 59, 25, -4, -14, -33, -46, -61] >>> total_length = geod.line_length(lons, lats) >>> f"{total_length:.3f}" '14259605.611' Parameters ---------- lons: array, :class:`numpy.ndarray`, list, tuple, or scalar The longitude points along a line. lats: array, :class:`numpy.ndarray`, list, tuple, or scalar The latitude points along a line. radians: bool, default=False If True, the input data is assumed to be in radians. Otherwise, the data is assumed to be in degrees. Returns ------- float: The total length of the line (meters). """ # process inputs, making copies that support buffer API. inx = _copytobuffer(lons)[0] iny = _copytobuffer(lats)[0] return self._line_length(inx, iny, radians=radians) def line_lengths(self, lons: Any, lats: Any, radians: bool = False) -> Any: """ .. versionadded:: 2.3.0 Calculate the distances between points along a line (meters). >>> from pyproj import Geod >>> geod = Geod(ellps="WGS84") >>> lats = [-72.9, -71.9, -74.9] >>> lons = [-74, -102, -102] >>> for line_length in geod.line_lengths(lons, lats): ... 
f"{line_length:.3f}" '943065.744' '334805.010' Parameters ---------- lons: array, :class:`numpy.ndarray`, list, tuple, or scalar The longitude points along a line. lats: array, :class:`numpy.ndarray`, list, tuple, or scalar The latitude points along a line. radians: bool, default=False If True, the input data is assumed to be in radians. Otherwise, the data is assumed to be in degrees. Returns ------- array, :class:`numpy.ndarray`, list, tuple, or scalar: The total length of the line (meters). """ # process inputs, making copies that support buffer API. inx, x_data_type = _copytobuffer(lons) iny = _copytobuffer(lats)[0] self._line_length(inx, iny, radians=radians) line_lengths = _convertback(x_data_type, inx) return line_lengths if x_data_type == DataType.FLOAT else line_lengths[:-1] def polygon_area_perimeter( self, lons: Any, lats: Any, radians: bool = False ) -> tuple[float, float]: """ .. versionadded:: 2.3.0 A simple interface for computing the area (meters^2) and perimeter (meters) of a geodesic polygon. Arbitrarily complex polygons are allowed. In the case self-intersecting of polygons the area is accumulated "algebraically", e.g., the areas of the 2 loops in a figure-8 polygon will partially cancel. There's no need to "close" the polygon by repeating the first vertex. The area returned is signed with counter-clockwise traversal being treated as positive. .. note:: lats should be in the range [-90 deg, 90 deg]. Example usage: >>> from pyproj import Geod >>> geod = Geod('+a=6378137 +f=0.0033528106647475126') >>> lats = [-72.9, -71.9, -74.9, -74.3, -77.5, -77.4, -71.7, -65.9, -65.7, ... -66.6, -66.9, -69.8, -70.0, -71.0, -77.3, -77.9, -74.7] >>> lons = [-74, -102, -102, -131, -163, 163, 172, 140, 113, ... 88, 59, 25, -4, -14, -33, -46, -61] >>> poly_area, poly_perimeter = geod.polygon_area_perimeter(lons, lats) >>> f"{poly_area:.1f} {poly_perimeter:.1f}" '13376856682207.4 14710425.4' Parameters ---------- lons: array, :class:`numpy.ndarray`, list, tuple, or scalar An array of longitude values. lats: array, :class:`numpy.ndarray`, list, tuple, or scalar An array of latitude values. radians: bool, default=False If True, the input data is assumed to be in radians. Otherwise, the data is assumed to be in degrees. Returns ------- (float, float): The geodesic area (meters^2) and perimeter (meters) of the polygon. """ return self._polygon_area_perimeter( _copytobuffer(lons)[0], _copytobuffer(lats)[0], radians=radians ) def geometry_length(self, geometry, radians: bool = False) -> float: """ .. versionadded:: 2.3.0 Returns the geodesic length (meters) of the shapely geometry. If it is a Polygon, it will return the sum of the lengths along the perimeter. If it is a MultiPolygon or MultiLine, it will return the sum of the lengths. Example usage: >>> from pyproj import Geod >>> from shapely.geometry import Point, LineString >>> line_string = LineString([Point(1, 2), Point(3, 4)]) >>> geod = Geod(ellps="WGS84") >>> f"{geod.geometry_length(line_string):.3f}" '313588.397' Parameters ---------- geometry: :class:`shapely.geometry.BaseGeometry` The geometry to calculate the length from. radians: bool, default=False If True, the input data is assumed to be in radians. Otherwise, the data is assumed to be in degrees. Returns ------- float: The total geodesic length of the geometry (meters). 
""" try: return self.line_length(*geometry.xy, radians=radians) # type: ignore[misc] except (AttributeError, NotImplementedError): pass if hasattr(geometry, "exterior"): return self.geometry_length(geometry.exterior, radians=radians) if hasattr(geometry, "geoms"): total_length = 0.0 for geom in geometry.geoms: total_length += self.geometry_length(geom, radians=radians) return total_length raise GeodError("Invalid geometry provided.") def geometry_area_perimeter( self, geometry, radians: bool = False ) -> tuple[float, float]: """ .. versionadded:: 2.3.0 A simple interface for computing the area (meters^2) and perimeter (meters) of a geodesic polygon as a shapely geometry. Arbitrarily complex polygons are allowed. In the case self-intersecting of polygons the area is accumulated "algebraically", e.g., the areas of the 2 loops in a figure-8 polygon will partially cancel. There's no need to "close" the polygon by repeating the first vertex. .. note:: lats should be in the range [-90 deg, 90 deg]. .. warning:: The area returned is signed with counter-clockwise (CCW) traversal being treated as positive. For polygons, holes should use the opposite traversal to the exterior (if the exterior is CCW, the holes/interiors should be CW). You can use `shapely.ops.orient` to modify the orientation. If it is a Polygon, it will return the area and exterior perimeter. It will subtract the area of the interior holes. If it is a MultiPolygon or MultiLine, it will return the sum of the areas and perimeters of all geometries. Example usage: >>> from pyproj import Geod >>> from shapely.geometry import LineString, Point, Polygon >>> geod = Geod(ellps="WGS84") >>> poly_area, poly_perimeter = geod.geometry_area_perimeter( ... Polygon( ... LineString([ ... Point(1, 1), Point(10, 1), Point(10, 10), Point(1, 10) ... ]), ... holes=[LineString([Point(1, 2), Point(3, 4), Point(5, 2)])], ... ) ... ) >>> f"{poly_area:.0f} {poly_perimeter:.0f}" '944373881400 3979008' Parameters ---------- geometry: :class:`shapely.geometry.BaseGeometry` The geometry to calculate the area and perimeter from. radians: bool, default=False If True, the input data is assumed to be in radians. Otherwise, the data is assumed to be in degrees. Returns ------- (float, float): The geodesic area (meters^2) and perimeter (meters) of the polygon. """ try: return self.polygon_area_perimeter( # type: ignore[misc] *geometry.xy, radians=radians ) except (AttributeError, NotImplementedError): pass # polygon if hasattr(geometry, "exterior"): total_area, total_perimeter = self.geometry_area_perimeter( geometry.exterior, radians=radians ) # subtract area of holes for hole in geometry.interiors: area, _ = self.geometry_area_perimeter(hole, radians=radians) total_area += area return total_area, total_perimeter # multi geometries if hasattr(geometry, "geoms"): total_area = 0.0 total_perimeter = 0.0 for geom in geometry.geoms: area, perimeter = self.geometry_area_perimeter(geom, radians=radians) total_area += area total_perimeter += perimeter return total_area, total_perimeter raise GeodError("Invalid geometry provided.") def __repr__(self) -> str: # search for ellipse name for ellps, vals in pj_ellps.items(): if self.a == vals["a"]: # self.sphere is True when self.f is zero or very close to # zero (0), so prevent divide by zero. 
if self.b == vals.get("b") or ( not self.sphere and (1.0 / self.f) == vals.get("rf") ): return f"{self.__class__.__name__}(ellps={ellps!r})" # no ellipse name found, call super class return super().__repr__() def __eq__(self, other: object) -> bool: """ equality operator == for Geod objects Example usage: >>> from pyproj import Geod >>> # Use Clarke 1866 ellipsoid. >>> gclrk1 = Geod(ellps='clrk66') >>> # Define Clarke 1866 using parameters >>> gclrk2 = Geod(a=6378206.4, b=6356583.8) >>> gclrk1 == gclrk2 True >>> # WGS 66 ellipsoid, PROJ style >>> gwgs66 = Geod('+ellps=WGS66') >>> # Naval Weapons Lab., 1965 ellipsoid >>> gnwl9d = Geod('+ellps=NWL9D') >>> # these ellipsoids are the same >>> gnwl9d == gwgs66 True >>> gclrk1 != gnwl9d # Clarke 1866 is unlike NWL9D True """ if not isinstance(other, _Geod): return False return self.__repr__() == other.__repr__() def reverse_azimuth(azi: Any, radians: bool = False) -> Any: """ Reverses the given azimuth (forward <-> backwards) .. versionadded:: 3.5.0 Accepted numeric scalar or array: - :class:`int` - :class:`float` - :class:`numpy.floating` - :class:`numpy.integer` - :class:`list` - :class:`tuple` - :class:`array.array` - :class:`numpy.ndarray` - :class:`xarray.DataArray` - :class:`pandas.Series` Parameters ---------- azi: scalar or array The azimuth. radians: bool, default=False If True, the input data is assumed to be in radians. Otherwise, the data is assumed to be in degrees. Returns ------- scalar or array: The reversed azimuth (forward <-> backwards) """ inazi, azi_data_type = _copytobuffer(azi) _reverse_azimuth(inazi, radians=radians) return _convertback(azi_data_type, inazi) pyproj-3.7.1/pyproj/list.pyi000066400000000000000000000002421475425760300161010ustar00rootroot00000000000000def get_proj_operations_map() -> dict[str, str]: ... def get_ellps_map() -> dict[str, dict[str, float]]: ... def get_prime_meridians_map() -> dict[str, str]: ... pyproj-3.7.1/pyproj/list.pyx000066400000000000000000000027071475425760300161300ustar00rootroot00000000000000include "proj.pxi" def get_proj_operations_map(): """ Returns ------- dict: Operations supported by PROJ. """ cdef const PJ_OPERATIONS *proj_operations = proj_list_operations() cdef int iii = 0 operations_map = {} while proj_operations[iii].id != NULL: operations_map[proj_operations[iii].id] = \ proj_operations[iii].descr[0].split("\n\t")[0] iii += 1 return operations_map def get_ellps_map(): """ Returns ------- dict: Ellipsoids supported by PROJ. """ cdef const PJ_ELLPS *proj_ellps = proj_list_ellps() cdef int iii = 0 ellps_map = {} while proj_ellps[iii].id != NULL: major_key, major_val = proj_ellps[iii].major.split("=") ell_key, ell_val = proj_ellps[iii].ell.split("=") ellps_map[proj_ellps[iii].id] = { major_key: float(major_val), ell_key: float(ell_val), "description": proj_ellps[iii].name } iii += 1 return ellps_map def get_prime_meridians_map(): """ Returns ------- dict: Prime Meridians supported by PROJ. """ cdef const PJ_PRIME_MERIDIANS *prime_meridians = proj_list_prime_meridians() cdef int iii = 0 prime_meridians_map = {} while prime_meridians[iii].id != NULL: prime_meridians_map[prime_meridians[iii].id] = \ prime_meridians[iii].defn iii += 1 return prime_meridians_map pyproj-3.7.1/pyproj/network.py000066400000000000000000000036021475425760300164510ustar00rootroot00000000000000""" Module for managing the PROJ network settings. 
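A minimal usage sketch (illustrative only; the names below are the helpers re-exported or defined in this module)::

    from pyproj.network import is_network_enabled, set_ca_bundle_path, set_network_enabled

    set_network_enabled(True)   # allow PROJ to fetch remote grids on demand
    set_ca_bundle_path()        # fall back to the certifi bundle unless the CA environment variables are set
    assert is_network_enabled()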
""" import os from pathlib import Path import certifi from pyproj._context import _set_context_ca_bundle_path from pyproj._network import ( # noqa: F401 pylint: disable=unused-import is_network_enabled, set_network_enabled, ) def set_ca_bundle_path(ca_bundle_path: Path | str | bool | None = None) -> None: """ .. versionadded:: 3.0.0 Sets the path to the CA Bundle used by the `curl` built into PROJ when PROJ network is enabled. See: :c:func:`proj_context_set_ca_bundle_path` Environment variables: - PROJ_CURL_CA_BUNDLE - CURL_CA_BUNDLE - SSL_CERT_FILE Parameters ---------- ca_bundle_path: Path | str | bool | None, optional Default is None, which only uses the `certifi` package path as a fallback if the environment variables are not set. If a path is passed in, then that will be the path used. If it is set to True, then it will default to using the path provided, by the `certifi` package. If it is set to False or an empty string then it will default to the system settings or environment variables. """ env_var_names = ("PROJ_CURL_CA_BUNDLE", "CURL_CA_BUNDLE", "SSL_CERT_FILE") if ca_bundle_path is False: # need to reset CA Bundle path to use system settings # or environment variables because it # could have been changed by the user previously ca_bundle_path = "" elif isinstance(ca_bundle_path, (str, Path)): ca_bundle_path = str(ca_bundle_path) elif (ca_bundle_path is True) or not any( env_var_name in os.environ for env_var_name in env_var_names ): ca_bundle_path = certifi.where() else: # reset CA Bundle path to use system settings # or environment variables ca_bundle_path = "" _set_context_ca_bundle_path(ca_bundle_path) pyproj-3.7.1/pyproj/proj.pxi000066400000000000000000000531731475425760300161120ustar00rootroot00000000000000# PROJ API Definition cdef extern from "proj.h" nogil: cdef int PROJ_VERSION_MAJOR cdef int PROJ_VERSION_MINOR cdef int PROJ_VERSION_PATCH void proj_context_set_search_paths( PJ_CONTEXT *ctx, int count_paths, const char* const* paths) int proj_context_set_database_path(PJ_CONTEXT *ctx, const char *dbPath, const char *const *auxDbPaths, const char* const *options) void proj_context_set_ca_bundle_path(PJ_CONTEXT *ctx, const char *path) const char *proj_context_get_database_metadata(PJ_CONTEXT* ctx, const char* key) ctypedef struct PJ ctypedef struct PJ_CONTEXT PJ_CONTEXT *proj_context_create () PJ_CONTEXT *proj_context_clone (PJ_CONTEXT *ctx) PJ_CONTEXT *proj_context_destroy (PJ_CONTEXT *ctx) void proj_assign_context(PJ* pj, PJ_CONTEXT* ctx) ctypedef enum PJ_LOG_LEVEL: PJ_LOG_NONE PJ_LOG_ERROR PJ_LOG_DEBUG PJ_LOG_TRACE PJ_LOG_TELL ctypedef void (*PJ_LOG_FUNCTION)(void *, int, const char *) void proj_log_func (PJ_CONTEXT *ctx, void *app_data, PJ_LOG_FUNCTION logf) int proj_errno (const PJ *P) const char * proj_context_errno_string(PJ_CONTEXT* ctx, int err) int proj_errno_reset (const PJ *P) PJ *proj_create (PJ_CONTEXT *ctx, const char *definition) PJ *proj_normalize_for_visualization(PJ_CONTEXT *ctx, const PJ* obj) ctypedef struct PJ_INFO: int major # Major release number int minor # Minor release number int patch # Patch level const char *release # Release info. Version + date const char *version # Full version number const char *searchpath # Paths where init and grid files are # looked for. Paths are separated by # semi-colons on Windows, and colons # on non-Windows platforms. 
const char *const *paths size_t path_count PJ_INFO proj_info() ctypedef struct PJ_PROJ_INFO: const char *id const char *description const char *definition int has_inverse #1 if an inverse mapping exists, 0 otherwise */ double accuracy PJ_PROJ_INFO proj_pj_info(PJ *P) ctypedef struct PJ_XYZT: double x, y, z, t ctypedef struct PJ_UVWT: double u, v, w, t ctypedef struct PJ_LPZT: double lam, phi, z, t ctypedef struct PJ_OPK: double o, p, k ctypedef struct PJ_ENU: double e, n, u ctypedef struct PJ_GEOD: double s, a1, a2 ctypedef struct PJ_UV: double u, v ctypedef struct PJ_XY: double x, y ctypedef struct PJ_LP: double lam, phi ctypedef struct PJ_XYZ: double x, y, z ctypedef struct PJ_UVW: double u, v, w ctypedef struct PJ_LPZ: double lam, phi, z ctypedef union PJ_COORD: double v[4]; PJ_XYZT xyzt; PJ_UVWT uvwt; PJ_LPZT lpzt; PJ_GEOD geod; PJ_OPK opk; PJ_ENU enu; PJ_XYZ xyz; PJ_UVW uvw; PJ_LPZ lpz; PJ_XY xy; PJ_UV uv; PJ_LP lp; PJ_COORD proj_coord (double x, double y, double z, double t) ctypedef enum PJ_DIRECTION: PJ_FWD # Forward PJ_IDENT # Do nothing PJ_INV # Inverse int proj_angular_input (PJ *P, PJ_DIRECTION dir) int proj_angular_output (PJ *P, PJ_DIRECTION dir) int proj_degree_input (PJ *P, PJ_DIRECTION dir) int proj_degree_output (PJ *P, PJ_DIRECTION dir) PJ_COORD proj_trans (PJ *P, PJ_DIRECTION direction, PJ_COORD coord) size_t proj_trans_generic ( PJ *P, PJ_DIRECTION direction, double *x, size_t sx, size_t nx, double *y, size_t sy, size_t ny, double *z, size_t sz, size_t nz, double *t, size_t st, size_t nt ) int proj_trans_bounds( PJ_CONTEXT* context, PJ *P, PJ_DIRECTION direction, const double xmin, const double ymin, const double xmax, const double ymax, double* out_xmin, double* out_ymin, double* out_xmax, double* out_ymax, int densify_pts ) PJ* proj_trans_get_last_used_operation(PJ *P) ctypedef struct PJ_AREA PJ *proj_create_crs_to_crs_from_pj( PJ_CONTEXT *ctx, PJ *source_crs, PJ *target_crs, PJ_AREA *area, const char* const *options, ) ctypedef enum PJ_COMPARISON_CRITERION: PJ_COMP_STRICT PJ_COMP_EQUIVALENT PJ_COMP_EQUIVALENT_EXCEPT_AXIS_ORDER_GEOGCRS void proj_destroy(PJ *obj) int proj_is_equivalent_to_with_ctx(PJ_CONTEXT *ctx, const PJ *obj, const PJ *other, PJ_COMPARISON_CRITERION criterion) const char* proj_get_id_auth_name(const PJ *obj, int index) const char* proj_get_id_code(const PJ *obj, int index) int proj_get_area_of_use(PJ_CONTEXT *ctx, const PJ *obj, double* out_west_lon_degree, double* out_south_lat_degree, double* out_east_lon_degree, double* out_north_lat_degree, const char **out_area_name) PJ_AREA *proj_area_create() void proj_area_set_bbox(PJ_AREA *area, double west_lon_degree, double south_lat_degree, double east_lon_degree, double north_lat_degree) void proj_area_destroy(PJ_AREA* area) ctypedef enum PJ_WKT_TYPE: PJ_WKT2_2015 PJ_WKT2_2015_SIMPLIFIED PJ_WKT2_2019 PJ_WKT2_2019_SIMPLIFIED PJ_WKT1_GDAL PJ_WKT1_ESRI const char* proj_as_wkt(PJ_CONTEXT *ctx, const PJ *obj, PJ_WKT_TYPE type, const char* const *options) ctypedef enum PJ_PROJ_STRING_TYPE: PJ_PROJ_5 PJ_PROJ_4 const char* proj_as_proj_string(PJ_CONTEXT *ctx, const PJ *obj, PJ_PROJ_STRING_TYPE type, const char* const *options) const char* proj_as_projjson(PJ_CONTEXT *ctx, const PJ *obj, const char* const *options) PJ *proj_crs_get_geodetic_crs(PJ_CONTEXT *ctx, const PJ *crs) ctypedef enum PJ_TYPE: PJ_TYPE_UNKNOWN PJ_TYPE_ELLIPSOID PJ_TYPE_PRIME_MERIDIAN PJ_TYPE_GEODETIC_REFERENCE_FRAME PJ_TYPE_DYNAMIC_GEODETIC_REFERENCE_FRAME PJ_TYPE_VERTICAL_REFERENCE_FRAME PJ_TYPE_DYNAMIC_VERTICAL_REFERENCE_FRAME 
PJ_TYPE_DATUM_ENSEMBLE PJ_TYPE_CRS PJ_TYPE_GEODETIC_CRS PJ_TYPE_GEOCENTRIC_CRS PJ_TYPE_GEOGRAPHIC_CRS PJ_TYPE_GEOGRAPHIC_2D_CRS PJ_TYPE_GEOGRAPHIC_3D_CRS PJ_TYPE_VERTICAL_CRS PJ_TYPE_PROJECTED_CRS PJ_TYPE_COMPOUND_CRS PJ_TYPE_TEMPORAL_CRS PJ_TYPE_ENGINEERING_CRS PJ_TYPE_BOUND_CRS PJ_TYPE_OTHER_CRS PJ_TYPE_CONVERSION PJ_TYPE_TRANSFORMATION PJ_TYPE_CONCATENATED_OPERATION PJ_TYPE_OTHER_COORDINATE_OPERATION PJ_TYPE_TEMPORAL_DATUM PJ_TYPE_ENGINEERING_DATUM PJ_TYPE_PARAMETRIC_DATUM PJ_TYPE_DERIVED_PROJECTED_CRS PJ_TYPE proj_get_type(const PJ *obj) const char* proj_get_name(const PJ *obj) const char* proj_get_remarks(const PJ *obj) const char* proj_get_scope(const PJ *obj) int proj_is_crs(const PJ *obj) int proj_is_derived_crs(PJ_CONTEXT *ctx, const PJ* crs) PJ *proj_crs_get_datum(PJ_CONTEXT *ctx, const PJ *crs) PJ *proj_crs_get_horizontal_datum(PJ_CONTEXT *ctx, const PJ *crs) ctypedef enum PJ_COORDINATE_SYSTEM_TYPE: PJ_CS_TYPE_UNKNOWN PJ_CS_TYPE_CARTESIAN PJ_CS_TYPE_ELLIPSOIDAL PJ_CS_TYPE_VERTICAL PJ_CS_TYPE_SPHERICAL PJ_CS_TYPE_ORDINAL PJ_CS_TYPE_PARAMETRIC PJ_CS_TYPE_DATETIMETEMPORAL PJ_CS_TYPE_TEMPORALCOUNT PJ_CS_TYPE_TEMPORALMEASURE PJ *proj_crs_get_coordinate_system(PJ_CONTEXT *ctx, const PJ *crs) PJ_COORDINATE_SYSTEM_TYPE proj_cs_get_type(PJ_CONTEXT *ctx, const PJ *cs) int proj_cs_get_axis_count(PJ_CONTEXT *ctx, const PJ *cs) int proj_cs_get_axis_info(PJ_CONTEXT *ctx, const PJ *cs, int index, const char **out_name, const char **out_abbrev, const char **out_direction, double *out_unit_conv_factor, const char **out_unit_name, const char **out_unit_auth_name, const char **out_unit_code) PJ *proj_get_ellipsoid(PJ_CONTEXT *ctx, const PJ *obj) int proj_ellipsoid_get_parameters(PJ_CONTEXT *ctx, const PJ *ellipsoid, double *out_semi_major_metre, double *out_semi_minor_metre, int *out_is_semi_minor_computed, double *out_inv_flattening) PJ *proj_get_prime_meridian(PJ_CONTEXT *ctx, const PJ *obj) int proj_prime_meridian_get_parameters(PJ_CONTEXT *ctx, const PJ *prime_meridian, double *out_longitude, double *out_unit_conv_factor, const char **out_unit_name) PJ *proj_crs_get_sub_crs(PJ_CONTEXT *ctx, const PJ *crs, int index) PJ *proj_get_source_crs(PJ_CONTEXT *ctx, const PJ *obj) PJ *proj_get_target_crs(PJ_CONTEXT *ctx, const PJ *obj) ctypedef struct PJ_OBJ_LIST PJ_OBJ_LIST *proj_identify(PJ_CONTEXT *ctx, const PJ* obj, const char *auth_name, const char* const *options, int **out_confidence) PJ *proj_list_get(PJ_CONTEXT *ctx, const PJ_OBJ_LIST *result, int index) int proj_list_get_count(const PJ_OBJ_LIST *result) void proj_list_destroy(PJ_OBJ_LIST *result) void proj_int_list_destroy(int* list) void proj_context_use_proj4_init_rules(PJ_CONTEXT *ctx, int enable) ctypedef enum PJ_GUESSED_WKT_DIALECT: PJ_GUESSED_WKT2_2018 PJ_GUESSED_WKT2_2015 PJ_GUESSED_WKT1_GDAL PJ_GUESSED_WKT1_ESRI PJ_GUESSED_NOT_WKT PJ_GUESSED_WKT_DIALECT proj_context_guess_wkt_dialect(PJ_CONTEXT *ctx, const char *wkt) ctypedef struct PJ_OPERATIONS: const char *id PJ *(*proj)(PJ *) const char * const *descr const PJ_OPERATIONS *proj_list_operations() ctypedef struct PJ_ELLPS: const char *id # ellipse keyword name const char *major # a= value const char *ell # elliptical parameter const char *name # comments const PJ_ELLPS *proj_list_ellps() ctypedef struct PJ_PRIME_MERIDIANS: const char *id const char *defn const PJ_PRIME_MERIDIANS *proj_list_prime_meridians() ctypedef char **PROJ_STRING_LIST void proj_string_list_destroy(PROJ_STRING_LIST list) PROJ_STRING_LIST proj_get_authorities_from_database(PJ_CONTEXT *ctx) PROJ_STRING_LIST 
proj_get_codes_from_database(PJ_CONTEXT *ctx, const char *auth_name, PJ_TYPE type, int allow_deprecated) ctypedef struct PROJ_CRS_INFO: char* auth_name char* code char* name PJ_TYPE type int deprecated int bbox_valid double west_lon_degree double south_lat_degree double east_lon_degree double north_lat_degree char* area_name char* projection_method_name ctypedef struct PROJ_CRS_LIST_PARAMETERS: const PJ_TYPE* types size_t typesCount int crs_area_of_use_contains_bbox int bbox_valid double west_lon_degree double south_lat_degree double east_lon_degree double north_lat_degree int allow_deprecated PROJ_CRS_LIST_PARAMETERS *proj_get_crs_list_parameters_create() void proj_get_crs_list_parameters_destroy(PROJ_CRS_LIST_PARAMETERS* params) PROJ_CRS_INFO **proj_get_crs_info_list_from_database( PJ_CONTEXT *ctx, const char *auth_name, const PROJ_CRS_LIST_PARAMETERS* params, int *out_result_count) void proj_crs_info_list_destroy(PROJ_CRS_INFO** list) PJ *proj_crs_get_coordoperation(PJ_CONTEXT *ctx, const PJ *crs) int proj_coordoperation_get_method_info(PJ_CONTEXT *ctx, const PJ *coordoperation, const char **out_method_name, const char **out_method_auth_name, const char **out_method_code) int proj_coordoperation_is_instantiable(PJ_CONTEXT *ctx, const PJ *coordoperation) int proj_coordoperation_has_ballpark_transformation(PJ_CONTEXT *ctx, const PJ *coordoperation) int proj_coordoperation_get_param_count(PJ_CONTEXT *ctx, const PJ *coordoperation) int proj_coordoperation_get_param_index(PJ_CONTEXT *ctx, const PJ *coordoperation, const char *name) int proj_coordoperation_get_param(PJ_CONTEXT *ctx, const PJ *coordoperation, int index, const char **out_name, const char **out_auth_name, const char **out_code, double *out_value, const char **out_value_string, double *out_unit_conv_factor, const char **out_unit_name, const char **out_unit_auth_name, const char **out_unit_code, const char **out_unit_category) int proj_coordoperation_get_grid_used_count(PJ_CONTEXT *ctx, const PJ *coordoperation) int proj_coordoperation_get_grid_used(PJ_CONTEXT *ctx, const PJ *coordoperation, int index, const char **out_short_name, const char **out_full_name, const char **out_package_name, const char **out_url, int *out_direct_download, int *out_open_license, int *out_available) double proj_coordoperation_get_accuracy(PJ_CONTEXT *ctx, const PJ *obj) int proj_coordoperation_get_towgs84_values(PJ_CONTEXT *ctx, const PJ *coordoperation, double *out_values, int value_count, int emit_error_if_incompatible) int proj_concatoperation_get_step_count(PJ_CONTEXT *ctx, const PJ *concatoperation) PJ *proj_concatoperation_get_step(PJ_CONTEXT *ctx, const PJ *concatoperation, int i_step) ctypedef enum PJ_CATEGORY: PJ_CATEGORY_ELLIPSOID PJ_CATEGORY_PRIME_MERIDIAN PJ_CATEGORY_DATUM PJ_CATEGORY_CRS PJ_CATEGORY_COORDINATE_OPERATION PJ_CATEGORY_DATUM_ENSEMBLE PJ *proj_create_from_database(PJ_CONTEXT *ctx, const char *auth_name, const char *code, PJ_CATEGORY category, int usePROJAlternativeGridNames, const char* const *options) PJ_OBJ_LIST *proj_create_from_name(PJ_CONTEXT *ctx, const char *auth_name, const char *searchedName, const PJ_TYPE* types, size_t typesCount, int approximateMatch, size_t limitResultCount, const char* const *options) ctypedef struct PJ_OPERATION_FACTORY_CONTEXT PJ_OPERATION_FACTORY_CONTEXT *proj_create_operation_factory_context( PJ_CONTEXT *ctx, const char *authority ) void proj_operation_factory_context_destroy( PJ_OPERATION_FACTORY_CONTEXT *ctx ) PJ_OBJ_LIST *proj_create_operations( PJ_CONTEXT *ctx, const PJ *source_crs, const 
PJ *target_crs, const PJ_OPERATION_FACTORY_CONTEXT *operationContext ) void proj_operation_factory_context_set_grid_availability_use( PJ_CONTEXT *ctx, PJ_OPERATION_FACTORY_CONTEXT *factory_ctx, PROJ_GRID_AVAILABILITY_USE use ) void proj_operation_factory_context_set_spatial_criterion( PJ_CONTEXT *ctx, PJ_OPERATION_FACTORY_CONTEXT *factory_ctx, PROJ_SPATIAL_CRITERION criterion ) void proj_operation_factory_context_set_area_of_interest( PJ_CONTEXT *ctx, PJ_OPERATION_FACTORY_CONTEXT *factory_ctx, double west_lon_degree, double south_lat_degree, double east_lon_degree, double north_lat_degree ) void proj_operation_factory_context_set_allow_ballpark_transformations( PJ_CONTEXT *ctx, PJ_OPERATION_FACTORY_CONTEXT *factory_ctx, int allow ) void proj_operation_factory_context_set_discard_superseded( PJ_CONTEXT *ctx, PJ_OPERATION_FACTORY_CONTEXT *factory_ctx, int discard ) void proj_operation_factory_context_set_desired_accuracy( PJ_CONTEXT *ctx, PJ_OPERATION_FACTORY_CONTEXT *factory_ctx, double accuracy ) ctypedef enum PROJ_SPATIAL_CRITERION: PROJ_SPATIAL_CRITERION_STRICT_CONTAINMENT PROJ_SPATIAL_CRITERION_PARTIAL_INTERSECTION ctypedef enum PROJ_GRID_AVAILABILITY_USE: PROJ_GRID_AVAILABILITY_USED_FOR_SORTING PROJ_GRID_AVAILABILITY_DISCARD_OPERATION_IF_MISSING_GRID PROJ_GRID_AVAILABILITY_IGNORED PROJ_GRID_AVAILABILITY_KNOWN_AVAILABLE ctypedef struct PJ_FACTORS: double meridional_scale double parallel_scale double areal_scale double angular_distortion double meridian_parallel_angle double meridian_convergence double tissot_semimajor double tissot_semiminor double dx_dlam double dx_dphi double dy_dlam double dy_dphi PJ_FACTORS proj_factors(PJ *P, PJ_COORD lp) # neworking related const char *proj_context_get_user_writable_directory(PJ_CONTEXT *ctx, int create) int proj_context_set_enable_network(PJ_CONTEXT* ctx, int enabled) int proj_context_is_network_enabled(PJ_CONTEXT* ctx) # units ctypedef struct PROJ_UNIT_INFO: # Authority name. char* auth_name # Object code. char* code # Object name. For example "metre", "US survey foot", etc. */ char* name # Category of the unit: one of "linear", "linear_per_time", "angular", # "angular_per_time", "scale", "scale_per_time" or "time" */ char* category # Conversion factor to apply to transform from that unit to the # corresponding SI unit (metre for "linear", radian for "angular", etc.). # It might be 0 in some cases to indicate no known conversion factor. double conv_factor # PROJ short name, like "m", "ft", "us-ft", etc... Might be NULL */ char* proj_short_name # Whether the object is deprecated int deprecated PROJ_UNIT_INFO **proj_get_units_from_database( PJ_CONTEXT *ctx, const char *auth_name, const char *category, int allow_deprecated, int *out_result_count, ) void proj_unit_list_destroy(PROJ_UNIT_INFO** list) const char *proj_context_get_url_endpoint(PJ_CONTEXT* ctx) int proj_is_deprecated(const PJ *obj) PJ_OBJ_LIST *proj_get_non_deprecated(PJ_CONTEXT *ctx, const PJ *obj) pyproj-3.7.1/pyproj/proj.py000066400000000000000000000257471475425760300157500ustar00rootroot00000000000000""" Performs cartographic transformations (converts from longitude,latitude to native map projection x,y coordinates and vice versa) using PROJ (https://proj.org). A Proj class instance is initialized with proj map projection control parameter key/value pairs. The key/value pairs can either be passed in a dictionary, or as keyword arguments, or as a PROJ string (compatible with the proj command). See :ref:`projections` for examples of key/value pairs defining different map projections. 
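For example (an illustrative sketch; the equivalent doctested examples appear in the :class:`Proj` docstring below)::

    from pyproj import Proj

    p = Proj(proj="utm", zone=10, ellps="WGS84", preserve_units=False)
    x, y = p(-120.108, 34.36116666)    # lon/lat (degrees) -> x/y (metres)
    lon, lat = p(x, y, inverse=True)   # and back again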
Calling a Proj class instance with the arguments lon, lat will convert lon/lat (in degrees) to x/y native map projection coordinates (in meters). """ import re import warnings from typing import Any from pyproj._compat import cstrencode from pyproj._transformer import Factors from pyproj.crs import CRS from pyproj.enums import TransformDirection from pyproj.list import get_proj_operations_map from pyproj.transformer import Transformer, TransformerFromPipeline from pyproj.utils import _convertback, _copytobuffer pj_list = get_proj_operations_map() class Proj(Transformer): """ Performs cartographic transformations. Converts from longitude, latitude to native map projection x,y coordinates and vice versa using PROJ (https://proj.org). Attributes ---------- srs: str The string form of the user input used to create the Proj. crs: pyproj.crs.CRS The CRS object associated with the Proj. """ def __init__( self, projparams: Any | None = None, preserve_units: bool = True, **kwargs ) -> None: """ A Proj class instance is initialized with proj map projection control parameter key/value pairs. The key/value pairs can either be passed in a dictionary, or as keyword arguments, or as a PROJ string (compatible with the proj command). See :ref:`projections` for examples of key/value pairs defining different map projections. Parameters ---------- projparams: int, str, dict, pyproj.CRS A PROJ or WKT string, PROJ dict, EPSG integer, or a pyproj.CRS instance. preserve_units: bool If false, will ensure +units=m. **kwargs: PROJ projection parameters. Example usage: >>> from pyproj import Proj >>> p = Proj(proj='utm',zone=10,ellps='WGS84', preserve_units=False) >>> x,y = p(-120.108, 34.36116666) >>> 'x=%9.3f y=%11.3f' % (x,y) 'x=765975.641 y=3805993.134' >>> 'lon=%8.3f lat=%5.3f' % p(x,y,inverse=True) 'lon=-120.108 lat=34.361' >>> # do 3 cities at a time in a tuple (Fresno, LA, SF) >>> lons = (-119.72,-118.40,-122.38) >>> lats = (36.77, 33.93, 37.62 ) >>> x,y = p(lons, lats) >>> 'x: %9.3f %9.3f %9.3f' % x 'x: 792763.863 925321.537 554714.301' >>> 'y: %9.3f %9.3f %9.3f' % y 'y: 4074377.617 3763936.941 4163835.303' >>> lons, lats = p(x, y, inverse=True) # inverse transform >>> 'lons: %8.3f %8.3f %8.3f' % lons 'lons: -119.720 -118.400 -122.380' >>> 'lats: %8.3f %8.3f %8.3f' % lats 'lats: 36.770 33.930 37.620' >>> p2 = Proj('+proj=utm +zone=10 +ellps=WGS84', preserve_units=False) >>> x,y = p2(-120.108, 34.36116666) >>> 'x=%9.3f y=%11.3f' % (x,y) 'x=765975.641 y=3805993.134' >>> p = Proj("EPSG:32667", preserve_units=False) >>> 'x=%12.3f y=%12.3f (meters)' % p(-114.057222, 51.045) 'x=-1783506.250 y= 6193827.033 (meters)' >>> p = Proj("EPSG:32667") >>> 'x=%12.3f y=%12.3f (feet)' % p(-114.057222, 51.045) 'x=-5851386.754 y=20320914.191 (feet)' >>> # test data with radian inputs >>> p1 = Proj("EPSG:4214") >>> x1, y1 = p1(116.366, 39.867) >>> f'{x1:.3f} {y1:.3f}' '116.366 39.867' >>> x2, y2 = p1(x1, y1, inverse=True) >>> f'{x2:.3f} {y2:.3f}' '116.366 39.867' """ self.crs = CRS.from_user_input(projparams, **kwargs) # make sure units are meters if preserve_units is False. 
if not preserve_units and "foot" in self.crs.axis_info[0].unit_name: # ignore export to PROJ string deprecation warning with warnings.catch_warnings(): warnings.filterwarnings( "ignore", "You will likely lose important projection information", UserWarning, ) projstring = self.crs.to_proj4(4) projstring = re.sub(r"\s\+units=[\w-]+", "", projstring) projstring += " +units=m" self.crs = CRS(projstring) # ignore export to PROJ string deprecation warning with warnings.catch_warnings(): warnings.filterwarnings( "ignore", "You will likely lose important projection information", UserWarning, ) projstring = self.crs.to_proj4() or self.crs.srs self.srs = re.sub(r"\s\+?type=crs", "", projstring).strip() super().__init__(TransformerFromPipeline(cstrencode(self.srs))) def __call__( self, longitude: Any, latitude: Any, inverse: bool = False, errcheck: bool = False, radians: bool = False, ) -> tuple[Any, Any]: """ Calling a Proj class instance with the arguments lon, lat will convert lon/lat (in degrees) to x/y native map projection coordinates (in meters). Inputs should be doubles (they will be cast to doubles if they are not, causing a slight performance hit). Works with numpy and regular python array objects, python sequences and scalars, but is fastest for array objects. Accepted numeric scalar or array: - :class:`int` - :class:`float` - :class:`numpy.floating` - :class:`numpy.integer` - :class:`list` - :class:`tuple` - :class:`array.array` - :class:`numpy.ndarray` - :class:`xarray.DataArray` - :class:`pandas.Series` Parameters ---------- longitude: scalar or array Input longitude coordinate(s). latitude: scalar or array Input latitude coordinate(s). inverse: bool, default=False If inverse is True the inverse transformation from x/y to lon/lat is performed. radians: bool, default=False If True, will expect input data to be in radians and will return radians if the projection is geographic. Otherwise, it uses degrees. This does not work with pyproj 2 and is ignored. It will be enabled again in pyproj 3. errcheck: bool, default=False If True, an exception is raised if the errors are found in the process. If False, ``inf`` is returned for errors. Returns ------- tuple[Any, Any]: The transformed coordinates. """ if inverse: direction = TransformDirection.INVERSE else: direction = TransformDirection.FORWARD return self.transform( xx=longitude, yy=latitude, direction=direction, errcheck=errcheck, radians=radians, ) def get_factors( self, longitude: Any, latitude: Any, radians: bool = False, errcheck: bool = False, ) -> Factors: """ .. versionadded:: 2.6.0 Calculate various cartographic properties, such as scale factors, angular distortion and meridian convergence. Depending on the underlying projection values will be calculated either numerically (default) or analytically. The function also calculates the partial derivatives of the given coordinate. Accepted numeric scalar or array: - :class:`int` - :class:`float` - :class:`numpy.floating` - :class:`numpy.integer` - :class:`list` - :class:`tuple` - :class:`array.array` - :class:`numpy.ndarray` - :class:`xarray.DataArray` - :class:`pandas.Series` Parameters ---------- longitude: scalar or array Input longitude coordinate(s). latitude: scalar or array Input latitude coordinate(s). radians: bool, default=False If True, will expect input data to be in radians and will return radians if the projection is geographic. Otherwise, it uses degrees. errcheck: bool, default=False If True, an exception is raised if the errors are found in the process. 
If False, ``inf`` is returned on error. Returns ------- Factors """ # process inputs, making copies that support buffer API. inx, x_data_type = _copytobuffer(longitude) iny = _copytobuffer(latitude)[0] # calculate the factors factors = self._transformer._get_factors( inx, iny, radians=radians, errcheck=errcheck ) # if inputs were lists, tuples or floats, convert back. return Factors( meridional_scale=_convertback(x_data_type, factors.meridional_scale), parallel_scale=_convertback(x_data_type, factors.parallel_scale), areal_scale=_convertback(x_data_type, factors.areal_scale), angular_distortion=_convertback(x_data_type, factors.angular_distortion), meridian_parallel_angle=_convertback( x_data_type, factors.meridian_parallel_angle ), meridian_convergence=_convertback( x_data_type, factors.meridian_convergence ), tissot_semimajor=_convertback(x_data_type, factors.tissot_semimajor), tissot_semiminor=_convertback(x_data_type, factors.tissot_semiminor), dx_dlam=_convertback(x_data_type, factors.dx_dlam), dx_dphi=_convertback(x_data_type, factors.dx_dphi), dy_dlam=_convertback(x_data_type, factors.dy_dlam), dy_dphi=_convertback(x_data_type, factors.dy_dphi), ) def definition_string(self) -> str: """Returns formal definition string for projection >>> Proj("EPSG:4326").definition_string() 'proj=longlat datum=WGS84 no_defs ellps=WGS84 towgs84=0,0,0' """ return self.definition def to_latlong_def(self) -> str | None: """return the definition string of the geographic (lat/lon) coordinate version of the current projection""" return self.crs.geodetic_crs.to_proj4(4) if self.crs.geodetic_crs else None def to_latlong(self) -> "Proj": """return a new Proj instance which is the geographic (lat/lon) coordinate version of the current projection""" return Proj(self.crs.geodetic_crs) def __reduce__(self) -> tuple[type["Proj"], tuple[str]]: """special method that allows pyproj.Proj instance to be pickled""" return self.__class__, (self.crs.srs,) pyproj-3.7.1/pyproj/py.typed000066400000000000000000000000001475425760300160720ustar00rootroot00000000000000pyproj-3.7.1/pyproj/sync.py000066400000000000000000000207661475425760300157460ustar00rootroot00000000000000""" Based on the logic in the PROJ projsync CLI program https://github.com/OSGeo/PROJ/blob/9ff543c4ffd86152bc58d0a0164b2ce9ebbb8bec/src/apps/projsync.cpp """ import hashlib import json import os from datetime import datetime from functools import partial from pathlib import Path from typing import Any from urllib.request import urlretrieve from pyproj._sync import get_proj_endpoint from pyproj.aoi import BBox from pyproj.datadir import get_data_dir, get_user_data_dir def _bbox_from_coords(coords: list) -> BBox | None: """ Get the bounding box from coordinates """ try: xxx, yyy = zip(*coords) return BBox(west=min(xxx), south=min(yyy), east=max(xxx), north=max(yyy)) except ValueError: pass coord_bbox = None for coord_set in coords: bbox = _bbox_from_coords(coord_set) if bbox is None: continue if coord_bbox is None: coord_bbox = bbox else: coord_bbox.west = min(coord_bbox.west, bbox.west) coord_bbox.south = min(coord_bbox.south, bbox.south) coord_bbox.north = max(coord_bbox.north, bbox.north) coord_bbox.east = max(coord_bbox.east, bbox.east) return coord_bbox def _bbox_from_geom(geom: dict[str, Any]) -> BBox | None: """ Get the bounding box from geojson geometry """ if "coordinates" not in geom or "type" not in geom: return None coordinates = geom["coordinates"] if geom["type"] != "MultiPolygon": return _bbox_from_coords(coordinates) found_minus_180 = False 
found_plus_180 = False bboxes = [] for coordinate_set in coordinates: bbox = _bbox_from_coords(coordinate_set) if bbox is None: continue if bbox.west == -180: found_minus_180 = True elif bbox.east == 180: found_plus_180 = True bboxes.append(bbox) grid_bbox = None for bbox in bboxes: if found_minus_180 and found_plus_180 and bbox.west == -180: bbox.west = 180 bbox.east += 360 if grid_bbox is None: grid_bbox = bbox else: grid_bbox.west = min(grid_bbox.west, bbox.west) grid_bbox.south = min(grid_bbox.south, bbox.south) grid_bbox.north = max(grid_bbox.north, bbox.north) grid_bbox.east = max(grid_bbox.east, bbox.east) return grid_bbox def _filter_bbox( feature: dict[str, Any], bbox: BBox, spatial_test: str, include_world_coverage: bool ) -> bool: """ Filter by the bounding box. Designed to use with 'filter' """ geom = feature.get("geometry") if geom is not None: geom_bbox = _bbox_from_geom(geom) if geom_bbox is None: return False if ( geom_bbox.east - geom_bbox.west > 359 and geom_bbox.north - geom_bbox.south > 179 ): if not include_world_coverage: return False geom_bbox.west = -float("inf") geom_bbox.east = float("inf") elif geom_bbox.east > 180 and bbox.west < -180: geom_bbox.west -= 360 geom_bbox.east -= 360 return getattr(bbox, spatial_test)(geom_bbox) return False def _filter_properties( feature: dict[str, Any], source_id: str | None = None, area_of_use: str | None = None, filename: str | None = None, ) -> bool: """ Filter by the properties. Designed to use with 'filter' """ properties = feature.get("properties") if not properties: return False p_filename = properties.get("name") p_source_id = properties.get("source_id") if not p_filename or not p_source_id: return False source_id__matched = source_id is None or source_id in p_source_id area_of_use__matched = area_of_use is None or area_of_use in properties.get( "area_of_use", "" ) filename__matched = filename is None or filename in p_filename if source_id__matched and area_of_use__matched and filename__matched: return True return False def _is_download_needed(grid_name: str) -> bool: """ Run through all of the PROJ directories to see if the file already exists. """ if Path(get_user_data_dir(), grid_name).exists(): return False for data_dir in get_data_dir().split(os.pathsep): if Path(data_dir, grid_name).exists(): return False return True def _filter_download_needed(feature: dict[str, Any]) -> bool: """ Filter grids so only those that need to be downloaded are included. """ properties = feature.get("properties") if not properties: return False filename = properties.get("name") if not filename: return False return _is_download_needed(filename) def _sha256sum(input_file): """ Return sha256 checksum of file given by path. """ hasher = hashlib.sha256() with open(input_file, "rb") as file: for chunk in iter(lambda: file.read(65536), b""): hasher.update(chunk) return hasher.hexdigest() def _download_resource_file( file_url, short_name, directory, verbose=False, sha256=None ): """ Download resource file from PROJ url """ if verbose: print(f"Downloading: {file_url}") tmp_path = Path(directory, f"{short_name}.part") try: urlretrieve(file_url, tmp_path) if sha256 is not None and sha256 != _sha256sum(tmp_path): raise RuntimeError(f"SHA256 mismatch: {short_name}") tmp_path.replace(Path(directory, short_name)) finally: try: os.remove(tmp_path) except FileNotFoundError: pass def _load_grid_geojson(target_directory: str | Path | None = None) -> dict[str, Any]: """ Returns ------- dict[str, Any]: The PROJ grid data list. 
""" if target_directory is None: target_directory = get_user_data_dir(True) local_path = Path(target_directory, "files.geojson") if not local_path.exists() or ( (datetime.now() - datetime.fromtimestamp(local_path.stat().st_mtime)).days > 0 ): _download_resource_file( file_url=f"{get_proj_endpoint()}/files.geojson", short_name="files.geojson", directory=target_directory, ) return json.loads(local_path.read_text(encoding="utf-8")) def get_transform_grid_list( source_id: str | None = None, area_of_use: str | None = None, filename: str | None = None, bbox: BBox | None = None, spatial_test: str = "intersects", include_world_coverage: bool = True, include_already_downloaded: bool = False, target_directory: str | Path | None = None, ) -> tuple: """ Get a list of transform grids that can be downloaded. Parameters ---------- source_id: str, optional area_of_use: str, optional filename: str, optional bbox: BBox, optional spatial_test: str, default="intersects" Can be "contains" or "intersects". include_world_coverage: bool, default=True If True, it will include grids with a global extent. include_already_downloaded: bool, default=False If True, it will list grids regardless of if they are downloaded. target_directory: str | Path, optional The directory to download the geojson file to. Default is the user writable directory. Returns ------- list[dict[str, Any]]: A list of geojson data of containing information about features that can be downloaded. """ features = _load_grid_geojson(target_directory=target_directory)["features"] if bbox is not None: if bbox.west > 180 and bbox.east > bbox.west: bbox.west -= 360 bbox.east -= 360 elif bbox.west < -180 and bbox.east > bbox.west: bbox.west += 360 bbox.east += 360 elif abs(bbox.west) < 180 and abs(bbox.east) < 180 and bbox.east < bbox.west: bbox.east += 360 features = filter( partial( _filter_bbox, bbox=bbox, spatial_test=spatial_test, include_world_coverage=include_world_coverage, ), features, ) # filter by properties features = filter( partial( _filter_properties, source_id=source_id, area_of_use=area_of_use, filename=filename, ), features, ) if include_already_downloaded: return tuple(features) return tuple(filter(_filter_download_needed, features)) pyproj-3.7.1/pyproj/transformer.py000066400000000000000000001274401475425760300173310ustar00rootroot00000000000000""" The transformer module is for performing cartographic transformations. """ __all__ = [ "transform", "itransform", "Transformer", "TransformerGroup", "AreaOfInterest", ] import threading import warnings from abc import ABC, abstractmethod from array import array from collections.abc import Iterable, Iterator from dataclasses import dataclass from itertools import chain, islice from pathlib import Path from typing import Any, overload from pyproj import CRS from pyproj._compat import cstrencode from pyproj._context import _clear_proj_error from pyproj._crs import AreaOfUse, CoordinateOperation from pyproj._transformer import ( # noqa: F401 pylint: disable=unused-import AreaOfInterest, _Transformer, _TransformerGroup, ) from pyproj.datadir import get_user_data_dir from pyproj.enums import ProjVersion, TransformDirection, WktVersion from pyproj.exceptions import ProjError from pyproj.sync import _download_resource_file from pyproj.utils import _convertback, _copytobuffer class TransformerMaker(ABC): """ .. versionadded:: 3.1.0 Base class for generating new instances of the Cython _Transformer class for thread safety in the Transformer class. 
""" @abstractmethod def __call__(self) -> _Transformer: """ Returns ------- _Transformer """ raise NotImplementedError @dataclass(frozen=True) class TransformerUnsafe(TransformerMaker): """ .. versionadded:: 3.1.0 Returns the original Cython _Transformer and is not thread-safe. """ transformer: _Transformer def __call__(self) -> _Transformer: """ Returns ------- _Transformer """ return self.transformer @dataclass(frozen=True) class TransformerFromCRS( # pylint: disable=too-many-instance-attributes TransformerMaker ): """ .. versionadded:: 3.1.0 .. versionadded:: 3.4.0 force_over Generates a Cython _Transformer class from input CRS data. """ crs_from: bytes crs_to: bytes always_xy: bool area_of_interest: AreaOfInterest | None authority: str | None accuracy: str | None allow_ballpark: bool | None force_over: bool = False only_best: bool | None = None def __call__(self) -> _Transformer: """ Returns ------- _Transformer """ return _Transformer.from_crs( self.crs_from, self.crs_to, always_xy=self.always_xy, area_of_interest=self.area_of_interest, authority=self.authority, accuracy=self.accuracy, allow_ballpark=self.allow_ballpark, force_over=self.force_over, only_best=self.only_best, ) @dataclass(frozen=True) class TransformerFromPipeline(TransformerMaker): """ .. versionadded:: 3.1.0 Generates a Cython _Transformer class from input pipeline data. """ proj_pipeline: bytes def __call__(self) -> _Transformer: """ Returns ------- _Transformer """ return _Transformer.from_pipeline(self.proj_pipeline) class TransformerGroup(_TransformerGroup): """ The TransformerGroup is a set of possible transformers from one CRS to another. .. versionadded:: 2.3.0 .. warning:: CoordinateOperation and Transformer objects returned are not thread-safe. From PROJ docs:: The operations are sorted with the most relevant ones first: by descending area (intersection of the transformation area with the area of interest, or intersection of the transformation with the area of use of the CRS), and by increasing accuracy. Operations with unknown accuracy are sorted last, whatever their area. """ def __init__( self, crs_from: Any, crs_to: Any, always_xy: bool = False, area_of_interest: AreaOfInterest | None = None, authority: str | None = None, accuracy: float | None = None, allow_ballpark: bool = True, allow_superseded: bool = False, ) -> None: """Get all possible transformations from a :obj:`pyproj.crs.CRS` or input used to create one. .. versionadded:: 3.4.0 authority, accuracy, allow_ballpark .. versionadded:: 3.6.0 allow_superseded Parameters ---------- crs_from: pyproj.crs.CRS or input used to create one Projection of input data. crs_to: pyproj.crs.CRS or input used to create one Projection of output data. always_xy: bool, default=False If true, the transform method will accept as input and return as output coordinates using the traditional GIS order, that is longitude, latitude for geographic CRS and easting, northing for most projected CRS. area_of_interest: :class:`.AreaOfInterest`, optional The area of interest to help order the transformations based on the best operation for the area. authority: str, optional When not specified, coordinate operations from any authority will be searched, with the restrictions set in the authority_to_authority_preference database table related to the authority of the source/target CRS themselves. If authority is set to “any”, then coordinate operations from any authority will be searched. 
If authority is a non-empty string different from "any", then coordinate operations will be searched only in that authority namespace (e.g. EPSG). accuracy: float, optional The minimum desired accuracy (in metres) of the candidate coordinate operations. allow_ballpark: bool, default=True Set to False to disallow the use of Ballpark transformation in the candidate coordinate operations. Default is to allow. allow_superseded: bool, default=False Set to True to allow the use of superseded (but not deprecated) transformations in the candidate coordinate operations. Default is to disallow. """ super().__init__( CRS.from_user_input(crs_from)._crs, CRS.from_user_input(crs_to)._crs, always_xy=always_xy, area_of_interest=area_of_interest, authority=authority, accuracy=-1 if accuracy is None else accuracy, allow_ballpark=allow_ballpark, allow_superseded=allow_superseded, ) for iii, transformer in enumerate(self._transformers): # pylint: disable=unsupported-assignment-operation self._transformers[iii] = Transformer(TransformerUnsafe(transformer)) @property def transformers(self) -> list["Transformer"]: """ list[:obj:`Transformer`]: List of available :obj:`Transformer` associated with the transformation. """ return self._transformers @property def unavailable_operations(self) -> list[CoordinateOperation]: """ list[:obj:`pyproj.crs.CoordinateOperation`]: List of :obj:`pyproj.crs.CoordinateOperation` that are not available due to missing grids. """ return self._unavailable_operations @property def best_available(self) -> bool: """ bool: If True, the best possible transformer is available. """ return self._best_available def download_grids( self, directory: str | Path | None = None, open_license: bool = True, verbose: bool = False, ) -> None: """ .. versionadded:: 3.0.0 Download missing grids that can be downloaded automatically. .. warning:: There are cases where the URL to download the grid is missing. In those cases, you can enable enable :ref:`debugging-internal-proj` and perform a transformation. The logs will show the grids PROJ searches for. Parameters ---------- directory: str or Path, optional The directory to download the grids to. Defaults to :func:`pyproj.datadir.get_user_data_dir` open_license: bool, default=True If True, will only download grids with an open license. verbose: bool, default=False If True, will print information about grids downloaded. """ if directory is None: directory = get_user_data_dir(True) # pylint: disable=not-an-iterable for unavailable_operation in self.unavailable_operations: for grid in unavailable_operation.grids: if ( not grid.available and grid.url.endswith(grid.short_name) and grid.direct_download and (grid.open_license or not open_license) ): _download_resource_file( file_url=grid.url, short_name=grid.short_name, directory=directory, verbose=verbose, ) elif not grid.available and verbose: warnings.warn(f"Skipped: {grid}") def __repr__(self) -> str: return ( f"\n" f"- transformers: {len(self.transformers)}\n" f"- unavailable_operations: {len(self.unavailable_operations)}" ) class TransformerLocal(threading.local): """ Threading local instance for cython _Transformer class. For more details, see: https://github.com/pyproj4/pyproj/issues/782 """ def __init__(self): self.transformer = None # Initialises in each thread super().__init__() class Transformer: """ The Transformer class is for facilitating re-using transforms without needing to re-create them. The goal is to make repeated transforms faster. Additionally, it provides multiple methods for initialization. 
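A short illustrative sketch (the doctested examples under :meth:`from_crs` and :meth:`transform` below are authoritative)::

    from pyproj import Transformer

    transformer = Transformer.from_crs("EPSG:4326", "EPSG:3857", always_xy=True)
    x, y = transformer.transform(2.2945, 48.8584)  # lon/lat -> Web Mercator metres (x, y order because always_xy=True)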
.. versionadded:: 2.1.0 """ def __init__( self, transformer_maker: TransformerMaker | None = None, ) -> None: if not isinstance(transformer_maker, TransformerMaker): _clear_proj_error() raise ProjError( "Transformer must be initialized using: " "'from_crs' or 'from_pipeline'." ) self._local = TransformerLocal() self._local.transformer = transformer_maker() self._transformer_maker = transformer_maker def __getstate__(self) -> dict[str, Any]: return {"_transformer_maker": self._transformer_maker} def __setstate__(self, state: dict[str, Any]): self.__dict__.update(state) self._local = TransformerLocal() self._local.transformer = self._transformer_maker() @property def _transformer(self): """ The Cython _Transformer object for this thread. Returns ------- _Transformer """ if self._local.transformer is None: self._local.transformer = self._transformer_maker() return self._local.transformer @property def name(self) -> str: """ str: Name of the projection. """ return self._transformer.id @property def description(self) -> str: """ str: Description of the projection. """ return self._transformer.description @property def definition(self) -> str: """ str: Definition of the projection. """ return self._transformer.definition @property def has_inverse(self) -> bool: """ bool: True if an inverse mapping exists. """ return self._transformer.has_inverse @property def accuracy(self) -> float: """ float: Expected accuracy of the transformation. -1 if unknown. """ return self._transformer.accuracy @property def area_of_use(self) -> AreaOfUse: """ .. versionadded:: 2.3.0 Returns ------- AreaOfUse: The area of use object with associated attributes. """ return self._transformer.area_of_use @property def remarks(self) -> str: """ .. versionadded:: 2.4.0 Returns ------- str: Remarks about object. """ return self._transformer.remarks @property def scope(self) -> str: """ .. versionadded:: 2.4.0 Returns ------- str: Scope of object. """ return self._transformer.scope @property def operations(self) -> tuple[CoordinateOperation] | None: """ .. versionadded:: 2.4.0 Returns ------- tuple[CoordinateOperation]: The operations in a concatenated operation. """ return self._transformer.operations def get_last_used_operation(self) -> "Transformer": """ .. versionadded:: 3.4.0 .. note:: Requires PROJ 9.1+ See: :c:func:`proj_trans_get_last_used_operation` Returns ------- Transformer: The operation used in the transform call. """ return Transformer( TransformerUnsafe(self._transformer.get_last_used_operation()) ) @property def is_network_enabled(self) -> bool: """ .. versionadded:: 3.0.0 Returns ------- bool: If the network is enabled. """ return self._transformer.is_network_enabled @property def source_crs(self) -> CRS | None: """ .. versionadded:: 3.3.0 Returns ------- CRS | None: The source CRS of a CoordinateOperation. """ return ( None if self._transformer.source_crs is None else CRS(self._transformer.source_crs) ) @property def target_crs(self) -> CRS | None: """ .. versionadded:: 3.3.0 Returns ------- CRS | None: The target CRS of a CoordinateOperation. """ return ( None if self._transformer.target_crs is None else CRS(self._transformer.target_crs) ) @staticmethod def from_proj( proj_from: Any, proj_to: Any, always_xy: bool = False, area_of_interest: AreaOfInterest | None = None, ) -> "Transformer": """Make a Transformer from a :obj:`pyproj.Proj` or input used to create one. .. deprecated:: 3.4.1 :meth:`~Transformer.from_crs` is preferred. .. versionadded:: 2.2.0 always_xy .. 
versionadded:: 2.3.0 area_of_interest Parameters ---------- proj_from: :obj:`pyproj.Proj` or input used to create one Projection of input data. proj_to: :obj:`pyproj.Proj` or input used to create one Projection of output data. always_xy: bool, default=False If true, the transform method will accept as input and return as output coordinates using the traditional GIS order, that is longitude, latitude for geographic CRS and easting, northing for most projected CRS. area_of_interest: :class:`.AreaOfInterest`, optional The area of interest to help select the transformation. Returns ------- Transformer """ # pylint: disable=import-outside-toplevel from pyproj import Proj if not isinstance(proj_from, Proj): proj_from = Proj(proj_from) if not isinstance(proj_to, Proj): proj_to = Proj(proj_to) return Transformer.from_crs( proj_from.crs, proj_to.crs, always_xy=always_xy, area_of_interest=area_of_interest, ) @staticmethod def from_crs( crs_from: Any, crs_to: Any, always_xy: bool = False, area_of_interest: AreaOfInterest | None = None, authority: str | None = None, accuracy: float | None = None, allow_ballpark: bool | None = None, force_over: bool = False, only_best: bool | None = None, ) -> "Transformer": """Make a Transformer from a :obj:`pyproj.crs.CRS` or input used to create one. See: - :c:func:`proj_create_crs_to_crs` - :c:func:`proj_create_crs_to_crs_from_pj` .. versionadded:: 2.2.0 always_xy .. versionadded:: 2.3.0 area_of_interest .. versionadded:: 3.1.0 authority, accuracy, allow_ballpark .. versionadded:: 3.4.0 force_over .. versionadded:: 3.5.0 only_best Parameters ---------- crs_from: pyproj.crs.CRS or input used to create one Projection of input data. crs_to: pyproj.crs.CRS or input used to create one Projection of output data. always_xy: bool, default=False If true, the transform method will accept as input and return as output coordinates using the traditional GIS order, that is longitude, latitude for geographic CRS and easting, northing for most projected CRS. area_of_interest: :class:`.AreaOfInterest`, optional The area of interest to help select the transformation. authority: str, optional When not specified, coordinate operations from any authority will be searched, with the restrictions set in the authority_to_authority_preference database table related to the authority of the source/target CRS themselves. If authority is set to “any”, then coordinate operations from any authority will be searched. If authority is a non-empty string different from "any", then coordinate operations will be searched only in that authority namespace (e.g. EPSG). accuracy: float, optional The minimum desired accuracy (in metres) of the candidate coordinate operations. allow_ballpark: bool, optional Set to False to disallow the use of Ballpark transformation in the candidate coordinate operations. Default is to allow. force_over: bool, default=False If True, it will force the +over flag on the transformation. Requires PROJ 9+. only_best: bool, optional Can be set to True to cause PROJ to error out if the best transformation known to PROJ and usable by PROJ if all grids known and usable by PROJ were accessible, cannot be used. Best transformation should be understood as the transformation returned by :c:func:`proj_get_suggested_operation` if all known grids were accessible (either locally or through network). Note that the default value for this option can also be set with the :envvar:`PROJ_ONLY_BEST_DEFAULT` environment variable, or with the ``only_best_default`` setting of :ref:`proj-ini`. 
The only_best kwarg overrides the default value if set. Requires PROJ 9.2+. Returns ------- Transformer """ return Transformer( TransformerFromCRS( cstrencode(CRS.from_user_input(crs_from).srs), cstrencode(CRS.from_user_input(crs_to).srs), always_xy=always_xy, area_of_interest=area_of_interest, authority=authority, accuracy=accuracy if accuracy is None else str(accuracy), allow_ballpark=allow_ballpark, force_over=force_over, only_best=only_best, ) ) @staticmethod def from_pipeline(proj_pipeline: str) -> "Transformer": """Make a Transformer from a PROJ pipeline string. :ref:`pipeline` See: - :c:func:`proj_create` - :c:func:`proj_create_from_database` .. versionadded:: 3.1.0 AUTH:CODE string support (e.g. EPSG:1671) Allowed input: - a PROJ string - a WKT string - a PROJJSON string - an object code (e.g. "EPSG:1671" "urn:ogc:def:coordinateOperation:EPSG::1671") - an object name. e.g "ITRF2014 to ETRF2014 (1)". In that case as uniqueness is not guaranteed, heuristics are applied to determine the appropriate best match. - a OGC URN combining references for concatenated operations (e.g. "urn:ogc:def:coordinateOperation,coordinateOperation:EPSG::3895, coordinateOperation:EPSG::1618") Parameters ---------- proj_pipeline: str Projection pipeline string. Returns ------- Transformer """ return Transformer(TransformerFromPipeline(cstrencode(proj_pipeline))) @overload def transform( # noqa: E704 pylint: disable=invalid-name self, xx: Any, yy: Any, radians: bool = False, errcheck: bool = False, direction: TransformDirection | str = TransformDirection.FORWARD, inplace: bool = False, ) -> tuple[Any, Any]: ... @overload def transform( # noqa: E704 pylint: disable=invalid-name self, xx: Any, yy: Any, zz: Any, radians: bool = False, errcheck: bool = False, direction: TransformDirection | str = TransformDirection.FORWARD, inplace: bool = False, ) -> tuple[Any, Any, Any]: ... @overload def transform( # noqa: E704 pylint: disable=invalid-name self, xx: Any, yy: Any, zz: Any, tt: Any, radians: bool = False, errcheck: bool = False, direction: TransformDirection | str = TransformDirection.FORWARD, inplace: bool = False, ) -> tuple[Any, Any, Any, Any]: ... def transform( # pylint: disable=invalid-name self, xx, yy, zz=None, tt=None, radians=False, errcheck=False, direction=TransformDirection.FORWARD, inplace=False, ): """ Transform points between two coordinate systems. See: :c:func:`proj_trans_generic` .. versionadded:: 2.1.1 errcheck .. versionadded:: 2.2.0 direction .. versionadded:: 3.2.0 inplace Accepted numeric scalar or array: - :class:`int` - :class:`float` - :class:`numpy.floating` - :class:`numpy.integer` - :class:`list` - :class:`tuple` - :class:`array.array` - :class:`numpy.ndarray` - :class:`xarray.DataArray` - :class:`pandas.Series` Parameters ---------- xx: scalar or array Input x coordinate(s). yy: scalar or array Input y coordinate(s). zz: scalar or array, optional Input z coordinate(s). tt: scalar or array, optional Input time coordinate(s). radians: bool, default=False If True, will expect input data to be in radians and will return radians if the projection is geographic. Otherwise, it uses degrees. Ignored for pipeline transformations with pyproj 2, but will work in pyproj 3. errcheck: bool, default=False If True, an exception is raised if the errors are found in the process. If False, ``inf`` is returned for errors. direction: pyproj.enums.TransformDirection, optional The direction of the transform. Default is :attr:`pyproj.enums.TransformDirection.FORWARD`. 
inplace: bool, default=False If True, will attempt to write the results to the input array instead of returning a new array. This will fail if the input is not an array in C order with the double data type. Example -------- >>> from pyproj import Transformer >>> transformer = Transformer.from_crs("EPSG:4326", "EPSG:3857") >>> x3, y3 = transformer.transform(33, 98) >>> f"{x3:.3f} {y3:.3f}" '10909310.098 3895303.963' >>> pipeline_str = ( ... "+proj=pipeline +step +proj=longlat +ellps=WGS84 " ... "+step +proj=unitconvert +xy_in=rad +xy_out=deg" ... ) >>> pipe_trans = Transformer.from_pipeline(pipeline_str) >>> xt, yt = pipe_trans.transform(2.1, 0.001) >>> f"{xt:.3f} {yt:.3f}" '2.100 0.001' >>> transproj = Transformer.from_crs( ... {"proj":'geocent', "ellps":'WGS84', "datum":'WGS84'}, ... "EPSG:4326", ... always_xy=True, ... ) >>> xpj, ypj, zpj = transproj.transform( ... -2704026.010, ... -4253051.810, ... 3895878.820, ... radians=True, ... ) >>> f"{xpj:.3f} {ypj:.3f} {zpj:.3f}" '-2.137 0.661 -20.531' >>> transprojr = Transformer.from_crs( ... "EPSG:4326", ... {"proj":'geocent', "ellps":'WGS84', "datum":'WGS84'}, ... always_xy=True, ... ) >>> xpjr, ypjr, zpjr = transprojr.transform(xpj, ypj, zpj, radians=True) >>> f"{xpjr:.3f} {ypjr:.3f} {zpjr:.3f}" '-2704026.010 -4253051.810 3895878.820' >>> transformer = Transformer.from_crs("EPSG:4326", 4326) >>> xeq, yeq = transformer.transform(33, 98) >>> f"{xeq:.0f} {yeq:.0f}" '33 98' """ try: # function optimized for point data return self._transformer._transform_point( inx=xx, iny=yy, inz=zz, intime=tt, direction=direction, radians=radians, errcheck=errcheck, ) except TypeError: pass # process inputs, making copies that support buffer API. inx, x_data_type = _copytobuffer(xx, inplace=inplace) iny, y_data_type = _copytobuffer(yy, inplace=inplace) if zz is not None: inz, z_data_type = _copytobuffer(zz, inplace=inplace) else: inz = None if tt is not None: intime, t_data_type = _copytobuffer(tt, inplace=inplace) else: intime = None # call pj_transform. inx,iny,inz buffers modified in place. self._transformer._transform( inx=inx, iny=iny, inz=inz, intime=intime, direction=direction, radians=radians, errcheck=errcheck, ) # if inputs were lists, tuples or floats, convert back. outx = _convertback(x_data_type, inx) outy = _convertback(y_data_type, iny) return_data: tuple[Any, ...] = (outx, outy) if zz is not None: return_data += (_convertback(z_data_type, inz),) if tt is not None: return_data += (_convertback(t_data_type, intime),) return return_data def itransform( self, points: Any, switch: bool = False, time_3rd: bool = False, radians: bool = False, errcheck: bool = False, direction: TransformDirection | str = TransformDirection.FORWARD, ) -> Iterator[Iterable]: """ Iterator/generator version of the function pyproj.Transformer.transform. See: :c:func:`proj_trans_generic` .. versionadded:: 2.1.1 errcheck .. versionadded:: 2.2.0 direction Parameters ---------- points: list List of point tuples. switch: bool, default=False If True x, y or lon,lat coordinates of points are switched to y, x or lat, lon. Default is False. time_3rd: bool, default=False If the input coordinates are 3 dimensional and the 3rd dimension is time. radians: bool, default=False If True, will expect input data to be in radians and will return radians if the projection is geographic. Otherwise, it uses degrees. Ignored for pipeline transformations with pyproj 2, but will work in pyproj 3. errcheck: bool, default=False If True, an exception is raised if the errors are found in the process. 
If False, ``inf`` is returned for errors. direction: pyproj.enums.TransformDirection, optional The direction of the transform. Default is :attr:`pyproj.enums.TransformDirection.FORWARD`. Example -------- >>> from pyproj import Transformer >>> transformer = Transformer.from_crs(4326, 2100) >>> points = [(22.95, 40.63), (22.81, 40.53), (23.51, 40.86)] >>> for pt in transformer.itransform(points): '{:.3f} {:.3f}'.format(*pt) '2221638.801 2637034.372' '2212924.125 2619851.898' '2238294.779 2703763.736' >>> pipeline_str = ( ... "+proj=pipeline +step +proj=longlat +ellps=WGS84 " ... "+step +proj=unitconvert +xy_in=rad +xy_out=deg" ... ) >>> pipe_trans = Transformer.from_pipeline(pipeline_str) >>> for pt in pipe_trans.itransform([(2.1, 0.001)]): ... '{:.3f} {:.3f}'.format(*pt) '2.100 0.001' >>> transproj = Transformer.from_crs( ... {"proj":'geocent', "ellps":'WGS84', "datum":'WGS84'}, ... "EPSG:4326", ... always_xy=True, ... ) >>> for pt in transproj.itransform( ... [(-2704026.010, -4253051.810, 3895878.820)], ... radians=True, ... ): ... '{:.3f} {:.3f} {:.3f}'.format(*pt) '-2.137 0.661 -20.531' >>> transprojr = Transformer.from_crs( ... "EPSG:4326", ... {"proj":'geocent', "ellps":'WGS84', "datum":'WGS84'}, ... always_xy=True, ... ) >>> for pt in transprojr.itransform( ... [(-2.137, 0.661, -20.531)], ... radians=True ... ): ... '{:.3f} {:.3f} {:.3f}'.format(*pt) '-2704214.394 -4254414.478 3894270.731' >>> transproj_eq = Transformer.from_crs( ... 'EPSG:4326', ... '+proj=longlat +datum=WGS84 +no_defs +type=crs', ... always_xy=True, ... ) >>> for pt in transproj_eq.itransform([(-2.137, 0.661)]): ... '{:.3f} {:.3f}'.format(*pt) '-2.137 0.661' """ point_it = iter(points) # point iterator # get first point to check stride try: fst_pt = next(point_it) except StopIteration: raise ValueError("iterable must contain at least one point") from None stride = len(fst_pt) if stride not in (2, 3, 4): raise ValueError("points can contain up to 4 coordinates") if time_3rd and stride != 3: raise ValueError("'time_3rd' is only valid for 3 coordinates.") # create a coordinate sequence generator etc. x1,y1,z1,x2,y2,z2,.... # chain so the generator returns the first point that was already acquired coord_gen = chain( fst_pt, (coords[c] for coords in point_it for c in range(stride)) ) while True: # create a temporary buffer storage for # the next 64 points (64*stride*8 bytes) buff = array("d", islice(coord_gen, 0, 64 * stride)) if len(buff) == 0: break self._transformer._transform_sequence( stride, buff, switch=switch, direction=direction, time_3rd=time_3rd, radians=radians, errcheck=errcheck, ) yield from zip(*([iter(buff)] * stride)) def transform_bounds( self, left: float, bottom: float, right: float, top: float, densify_pts: int = 21, radians: bool = False, errcheck: bool = False, direction: TransformDirection | str = TransformDirection.FORWARD, ) -> tuple[float, float, float, float]: """ .. versionadded:: 3.1.0 See: :c:func:`proj_trans_bounds` Transform boundary densifying the edges to account for nonlinear transformations along these edges and extracting the outermost bounds. If the destination CRS is geographic and right < left then the bounds crossed the antimeridian. In this scenario there are two polygons, one on each side of the antimeridian. The first polygon should be constructed with (left, bottom, 180, top) and the second with (-180, bottom, right, top).
To construct the bounding polygons with shapely:: def bounding_polygon(left, bottom, right, top): if right < left: return shapely.geometry.MultiPolygon( [ shapely.geometry.box(left, bottom, 180, top), shapely.geometry.box(-180, bottom, right, top), ] ) return shapely.geometry.box(left, bottom, right, top) Parameters ---------- left: float Minimum bounding coordinate of the first axis in source CRS (or the target CRS if using the reverse direction). bottom: float Minimum bounding coordinate of the second axis in source CRS. (or the target CRS if using the reverse direction). right: float Maximum bounding coordinate of the first axis in source CRS. (or the target CRS if using the reverse direction). top: float Maximum bounding coordinate of the second axis in source CRS. (or the target CRS if using the reverse direction). densify_pts: uint, default=21 Number of points to add to each edge to account for nonlinear edges produced by the transform process. Large numbers will produce worse performance. radians: bool, default=False If True, will expect input data to be in radians and will return radians if the projection is geographic. Otherwise, it uses degrees. errcheck: bool, default=False If True, an exception is raised if the errors are found in the process. If False, ``inf`` is returned for errors. direction: pyproj.enums.TransformDirection, optional The direction of the transform. Default is :attr:`pyproj.enums.TransformDirection.FORWARD`. Returns ------- left, bottom, right, top: float Outermost coordinates in target coordinate reference system. """ return self._transformer._transform_bounds( left=left, bottom=bottom, right=right, top=top, densify_pts=densify_pts, radians=radians, errcheck=errcheck, direction=direction, ) def to_proj4( self, version: ProjVersion | str = ProjVersion.PROJ_5, pretty: bool = False, ) -> str: """ Convert the projection to a PROJ string. .. versionadded:: 3.1.0 Parameters ---------- version: pyproj.enums.ProjVersion The version of the PROJ string output. Default is :attr:`pyproj.enums.ProjVersion.PROJ_5`. pretty: bool, default=False If True, it will set the output to be a multiline string. Returns ------- str: The PROJ string. """ return self._transformer.to_proj4(version=version, pretty=pretty) def to_wkt( self, version: WktVersion | str = WktVersion.WKT2_2019, pretty: bool = False, ) -> str: """ Convert the projection to a WKT string. Version options: - WKT2_2015 - WKT2_2015_SIMPLIFIED - WKT2_2019 - WKT2_2019_SIMPLIFIED - WKT1_GDAL - WKT1_ESRI Parameters ---------- version: pyproj.enums.WktVersion, optional The version of the WKT output. Default is :attr:`pyproj.enums.WktVersion.WKT2_2019`. pretty: bool, default=False If True, it will set the output to be a multiline string. Returns ------- str: The WKT string. """ return self._transformer.to_wkt(version=version, pretty=pretty) def to_json(self, pretty: bool = False, indentation: int = 2) -> str: """ Convert the projection to a JSON string. .. versionadded:: 2.4.0 Parameters ---------- pretty: bool, default=False If True, it will set the output to be a multiline string. indentation: int, default=2 If pretty is True, it will set the width of the indentation. Returns ------- str: The JSON string. """ return self._transformer.to_json(pretty=pretty, indentation=indentation) def to_json_dict(self) -> dict: """ Convert the projection to a JSON dictionary. .. versionadded:: 2.4.0 Returns ------- dict: The JSON dictionary.
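Example (an illustrative sketch; the exact keys vary with the coordinate operation, but the return value is always a plain ``dict``)::

    >>> from pyproj import Transformer
    >>> transformer = Transformer.from_crs("EPSG:4326", "EPSG:3857")
    >>> proj_json = transformer.to_json_dict()
    >>> isinstance(proj_json, dict)
    True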
""" return self._transformer.to_json_dict() def __str__(self) -> str: return self.definition def __repr__(self) -> str: return ( f"<{self._transformer.type_name}: {self.name}>\n" f"Description: {self.description}\n" f"Area of Use:\n{self.area_of_use or '- undefined'}" ) def __eq__(self, other: object) -> bool: if not isinstance(other, Transformer): return False return self._transformer.__eq__(other._transformer) def is_exact_same(self, other: Any) -> bool: """ Check if the Transformer objects are the exact same. If it is not a Transformer, then it returns False. Parameters ---------- other: Any Returns ------- bool """ if not isinstance(other, Transformer): return False return self._transformer.is_exact_same(other._transformer) def transform( # pylint: disable=invalid-name p1: Any, p2: Any, x: Any, y: Any, z: Any | None = None, tt: Any | None = None, radians: bool = False, errcheck: bool = False, always_xy: bool = False, ): """ .. versionadded:: 2.2.0 always_xy .. deprecated:: 2.6.1 This function is deprecated. See: :ref:`upgrade_transformer` x2, y2, z2 = transform(p1, p2, x1, y1, z1) Transform points between two coordinate systems defined by the Proj instances p1 and p2. The points x1,y1,z1 in the coordinate system defined by p1 are transformed to x2,y2,z2 in the coordinate system defined by p2. z1 is optional, if it is not set it is assumed to be zero (and only x2 and y2 are returned). If the optional keyword 'radians' is True (default is False), then all input and output coordinates will be in radians instead of the default of degrees for geographic input/output projections. If the optional keyword 'errcheck' is set to True an exception is raised if the transformation is invalid. By default errcheck=False and ``inf`` is returned for an invalid transformation (and no exception is raised). If `always_xy` is toggled, the transform method will accept as input and return as output coordinates using the traditional GIS order, that is longitude, latitude for geographic CRS and easting, northing for most projected CRS. In addition to converting between cartographic and geographic projection coordinates, this function can take care of datum shifts (which cannot be done using the __call__ method of the Proj instances). It also allows for one of the coordinate systems to be geographic (proj = 'latlong'). x,y and z can be numpy or regular python arrays, python lists/tuples or scalars. Arrays are fastest. For projections in geocentric coordinates, values of x and y are given in meters. z is always meters. """ warnings.warn( ( "This function is deprecated. " "See: https://pyproj4.github.io/pyproj/stable/" "gotchas.html#upgrading-to-pyproj-2-from-pyproj-1" ), FutureWarning, stacklevel=2, ) return Transformer.from_proj(p1, p2, always_xy=always_xy).transform( xx=x, yy=y, zz=z, tt=tt, radians=radians, errcheck=errcheck ) def itransform( # pylint: disable=invalid-name p1: Any, p2: Any, points: Iterable[Iterable], switch: bool = False, time_3rd: bool = False, radians: bool = False, errcheck: bool = False, always_xy: bool = False, ): """ .. versionadded:: 2.2.0 always_xy .. deprecated:: 2.6.1 This function is deprecated. See: :ref:`upgrade_transformer` points2 = itransform(p1, p2, points1) Iterator/generator version of the function pyproj.transform. Transform points between two coordinate systems defined by the Proj instances p1 and p2. 
This function can be used as an alternative to pyproj.transform when there is a need to transform a big number of coordinates lazily, for example when reading and processing from a file. Points1 is an iterable/generator of coordinates x1,y1(,z1) or lon1,lat1(,z1) in the coordinate system defined by p1. Points2 is an iterator that returns tuples of x2,y2(,z2) or lon2,lat2(,z2) coordinates in the coordinate system defined by p2. z are provided optionally. Points1 can be: - a tuple/list of tuples/lists i.e. for 2d points: [(xi,yi),(xj,yj),....(xn,yn)] - a Nx3 or Nx2 2d numpy array where N is the point number - a generator of coordinates (xi,yi) for 2d points or (xi,yi,zi) for 3d If optional keyword 'switch' is True (default is False) then x, y or lon,lat coordinates of points are switched to y, x or lat, lon. If the optional keyword 'radians' is True (default is False), then all input and output coordinates will be in radians instead of the default of degrees for geographic input/output projections. If the optional keyword 'errcheck' is set to True an exception is raised if the transformation is invalid. By default errcheck=False and ``inf`` is returned for an invalid transformation (and no exception is raised). If `always_xy` is toggled, the transform method will accept as input and return as output coordinates using the traditional GIS order, that is longitude, latitude for geographic CRS and easting, northing for most projected CRS. Example usage: >>> from pyproj import Proj, itransform >>> # projection 1: WGS84 >>> # (defined by epsg code 4326) >>> p1 = Proj('epsg:4326', preserve_units=False) >>> # projection 2: GGRS87 / Greek Grid >>> p2 = Proj('epsg:2100', preserve_units=False) >>> # Three points with coordinates lon, lat in p1 >>> points = [(22.95, 40.63), (22.81, 40.53), (23.51, 40.86)] >>> # transform this point to projection 2 coordinates. >>> for pt in itransform(p1,p2,points, always_xy=True): '%6.3f %7.3f' % pt '411050.470 4497928.574' '399060.236 4486978.710' '458553.243 4523045.485' >>> for pt in itransform(4326, 4326, [(30, 60)]): ... '{:.0f} {:.0f}'.format(*pt) '30 60' """ warnings.warn( ( "This function is deprecated. " "See: https://pyproj4.github.io/pyproj/stable/" "gotchas.html#upgrading-to-pyproj-2-from-pyproj-1" ), FutureWarning, stacklevel=2, ) return Transformer.from_proj(p1, p2, always_xy=always_xy).itransform( points, switch=switch, time_3rd=time_3rd, radians=radians, errcheck=errcheck ) pyproj-3.7.1/pyproj/utils.py000066400000000000000000000105721475425760300161240ustar00rootroot00000000000000""" Utility functions used within pyproj """ import json from array import array from enum import Enum, auto from typing import Any def is_null(value: Any) -> bool: """ Check if value is NaN or None """ # pylint: disable=comparison-with-itself return value != value or value is None def strtobool(value: Any) -> bool: """ https://docs.python.org/3.9/distutils/apiref.html#distutils.util.strtobool Here since distutils is deprecated. Convert a string representation of truth to True or False. 
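Example (a minimal sketch; the accepted truth values are the ones handled in the body below)::

    >>> strtobool("YES")
    True
    >>> strtobool("off")
    False
    >>> strtobool(1)
    True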
""" value = str(value).lower() if value in ("y", "yes", "t", "true", "on", "1"): return True if value in ("n", "no", "f", "false", "off", "0"): return False raise ValueError(f"invalid truth value: '{value}'") class NumpyEncoder(json.JSONEncoder): """ Handle numpy types when dumping to JSON """ def default(self, obj): # pylint: disable=arguments-renamed try: return obj.tolist() except AttributeError: pass try: # numpy scalars if obj.dtype.kind == "f": return float(obj) if obj.dtype.kind == "i": return int(obj) except AttributeError: pass return json.JSONEncoder.default(self, obj) class DataType(Enum): """ Data type for copy to buffer and convertback operations """ FLOAT = auto() LIST = auto() TUPLE = auto() ARRAY = auto() def _copytobuffer_return_scalar(xxx: Any) -> tuple[array, DataType]: """ Prepares scalar for PROJ C-API: - Makes a copy because PROJ modifies buffer in place - Make sure dtype is double as that is what PROJ expects - Makes sure object supports Python Buffer API Parameters ----------- xxx: float or 0-d numpy array Returns ------- tuple[Any, DataType] The copy of the data prepared for the PROJ API & Python Buffer API. """ try: return array("d", (float(xxx),)), DataType.FLOAT except Exception: raise TypeError("input must be a scalar") from None def _copytobuffer(xxx: Any, inplace: bool = False) -> tuple[Any, DataType]: """ Prepares data for PROJ C-API: - Makes a copy because PROJ modifies buffer in place - Make sure dtype is double as that is what PROJ expects - Makes sure object supports Python Buffer API If the data is a numpy array, it ensures the data is in C order. Parameters ---------- xxx: Any A scalar, list, tuple, numpy.array, pandas.Series, xaray.DataArray, or dask.array.Array. inplace: bool, default=False If True, will return the array without a copy if it meets the requirements of the Python Buffer API & PROJ C-API. Returns ------- tuple[Any, DataType] The copy of the data prepared for the PROJ API & Python Buffer API. """ # check for pandas.Series, xarray.DataArray or dask.array.Array # also handle numpy masked Arrays; note that pandas.Series also has a # "mask" attribute, hence checking for simply the "mask" attr in that # case isn't sufficient if ( not hasattr(xxx, "hardmask") and hasattr(xxx, "__array__") and callable(xxx.__array__) ): xxx = xxx.__array__() # handle numpy data if hasattr(xxx, "shape"): if xxx.shape == (): # convert numpy array scalar to float # (array scalars don't support buffer API) return _copytobuffer_return_scalar(xxx) # Use C order when copying to handle arrays in fortran order return xxx.astype("d", order="C", copy=not inplace), DataType.ARRAY data_type = DataType.ARRAY if isinstance(xxx, array): if not inplace or xxx.typecode != "d": xxx = array("d", xxx) elif isinstance(xxx, list): xxx = array("d", xxx) data_type = DataType.LIST elif isinstance(xxx, tuple): xxx = array("d", xxx) data_type = DataType.TUPLE else: return _copytobuffer_return_scalar(xxx) return xxx, data_type def _convertback(data_type: DataType, inx: Any) -> Any: # if inputs were lists, tuples or floats, convert back to original type. 
if data_type == DataType.FLOAT: return inx[0] if data_type == DataType.LIST: return inx.tolist() if data_type == DataType.TUPLE: return tuple(inx) return inx pyproj-3.7.1/pyproject.toml000066400000000000000000000070371475425760300160050ustar00rootroot00000000000000[build-system] requires = ["setuptools>=61.0.0", "wheel", "cython>=3"] build-backend = "setuptools.build_meta" [project] name = "pyproj" dynamic = ["version"] description = "Python interface to PROJ (cartographic projections and coordinate transformations library)" readme = "README.md" authors = [ {name = "Jeff Whitaker", email = "jeffrey.s.whitaker@noaa.gov"}, ] maintainers = [ {name = "pyproj contributors"}, ] license = {text = "MIT"} keywords = [ "GIS", "map", "geospatial", "coordinate-systems", "coordinate-transformation", "cartographic-projection", "geodesic", ] classifiers = [ "Development Status :: 4 - Beta", "Intended Audience :: Science/Research", "License :: OSI Approved :: MIT License", "Operating System :: OS Independent", "Programming Language :: Python", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Programming Language :: Python :: 3 :: Only", "Topic :: Scientific/Engineering", "Topic :: Scientific/Engineering :: GIS", "Topic :: Scientific/Engineering :: Mathematics", "Topic :: Software Development :: Libraries :: Python Modules", "Typing :: Typed", ] requires-python = ">=3.10" dependencies = [ "certifi", ] [project.urls] homepage = "https://pyproj4.github.io/pyproj/" documentation = "https://pyproj4.github.io/pyproj/" repository = "https://github.com/pyproj4/pyproj" changelog = "https://pyproj4.github.io/pyproj/stable/history.html" [project.scripts] pyproj = "pyproj.__main__:main" [tool.setuptools] zip-safe = false # https://mypy.readthedocs.io/en/stable/installed_packages.html [tool.setuptools.packages.find] include = ["pyproj", "pyproj.*"] [tool.setuptools.dynamic] version = {attr = "pyproj.__version__"} [tool.black] target_version = ["py310"] [tool.ruff] line-length = 88 target-version = "py310" fix = true [tool.ruff.lint] unfixable = [] select = [ # pyflakes "F", # pycodestyle "E", "W", # flake8-2020 "YTT", # flake8-bugbear "B", # flake8-quotes "Q", # flake8-debugger "T10", # flake8-gettext "INT", # pylint # "PL", # flake8-pytest-style # "PT", # misc lints "PIE", # flake8-pyi "PYI", # tidy imports "TID", # implicit string concatenation "ISC", # type-checking imports "TCH", # comprehensions "C4", # pygrep-hooks "PGH", # Ruff-specific rules "RUF", # flake8-bandit: exec-builtin "S102", # NumPy-specific rules "NPY", # Perflint "PERF", # flynt "FLY", # flake8-logging-format "G", # flake8-future-annotations "FA", # flake8-slots "SLOT", # flake8-raise "RSE", ] ignore = [ ### Intentionally disabled # Line too long "E501", # Unnecessary `dict` call (rewrite as a literal) "C408", # Consider iterable unpacking instead of concatenation "RUF005", # No explicit `stacklevel` keyword argument found "B028", # Unused `noqa` directive # Only work if ruff is the solve linter/formatter "RUF100", # `zip()` without an explicit `strict=` parameter "B905", # Only simple default values allowed for typed arguments "PYI011", ### TODO: Enable gradually # Rename unused "B007", # Move standard library import into a type-checking block "TCH003", # Consider f-string instead of string join "FLY002", # Use a list comprehension to create a transformed list "PERF401" ] [tool.mypy] files = ["pyproj"] python_version = "3.10" ignore_errors = false enable_error_code = 
"ignore-without-code" pyproj-3.7.1/pytest.ini000066400000000000000000000005211475425760300151110ustar00rootroot00000000000000[pytest] markers = slow: marks tests as slow (deselect with '-m "not slow"') network: marks tests that need a network connection (deselect with '-m "not network"') cli: marks tests that need the CLI installed (deselect with '-m "not cli"') grid: marks tests that need transformation grids (deselect with '-m "not grid"') pyproj-3.7.1/requirements-dev.txt000066400000000000000000000000551475425760300171220ustar00rootroot00000000000000cython>=0.28.4 mypy types-certifi pre-commit pyproj-3.7.1/requirements-docs.txt000066400000000000000000000000441475425760300172720ustar00rootroot00000000000000furo>=2022.6 sphinx sphinx-argparse pyproj-3.7.1/requirements-test.txt000066400000000000000000000002011475425760300173140ustar00rootroot00000000000000packaging pytest>3.6 pytest-cov numpy pandas; python_version<'3.12' shapely; python_version<'3.12' xarray; python_version<'3.12' pyproj-3.7.1/setup.py000066400000000000000000000210121475425760300145700ustar00rootroot00000000000000import os import platform import re import shutil import subprocess import sys from pathlib import Path from setuptools import Extension, setup PROJ_MIN_VERSION = (9, 2, 0) CURRENT_FILE_PATH = Path(__file__).absolute().parent BASE_INTERNAL_PROJ_DIR = Path("proj_dir") INTERNAL_PROJ_DIR = CURRENT_FILE_PATH / "pyproj" / BASE_INTERNAL_PROJ_DIR PROJ_VERSION_SEARCH = re.compile(r".*Rel\.\s+(?P\d+\.\d+\.\d+).*") VERSION_SEARCH = re.compile(r".*(?P\d+\.\d+\.\d+).*") def _parse_version(version: str) -> tuple[int, int, int]: """Convert a version string to a tuple of integers.""" match = VERSION_SEARCH.search(version) if not match: raise SystemExit( f"PROJ version unable to be determined from {version}. " "Please set the PROJ_VERSION environment variable." ) return tuple( int(ver) for ver in match.groupdict()["version"].split(".", maxsplit=2) ) def get_proj_version(proj_dir: Path) -> tuple[int, int, int]: """ Determine PROJ version. Prefer PROJ_VERSION environment variable. If PROJ_VERSION is not set, try to determine the version from the PROJ executable. """ proj_version = os.environ.get("PROJ_VERSION") if proj_version: return _parse_version(proj_version) proj = proj_dir / "bin" / "proj" proj_ver = subprocess.check_output(str(proj), stderr=subprocess.STDOUT).decode( "ascii" ) match = PROJ_VERSION_SEARCH.search(proj_ver) if not match: raise SystemExit( "PROJ version unable to be determined. " "Please set the PROJ_VERSION environment variable." ) return _parse_version(match.groupdict()["version"]) def check_proj_version(proj_version: tuple[int, int, int]) -> None: """checks that the PROJ library meets the minimum version""" if proj_version < PROJ_MIN_VERSION: proj_version_str = ".".join(str(ver) for ver in proj_version) min_proj_version_str = ".".join(str(ver) for ver in PROJ_MIN_VERSION) raise SystemExit( f"ERROR: Minimum supported PROJ version is {min_proj_version_str}, " f"installed version is {proj_version_str}. For more information see: " "https://pyproj4.github.io/pyproj/stable/installation.html" ) def get_proj_dir() -> Path: """ This function finds the base PROJ directory. 
""" proj_dir_environ = os.environ.get("PROJ_DIR") proj_dir: Path | None = None if proj_dir_environ is not None: proj_dir = Path(proj_dir_environ) if proj_dir is None and INTERNAL_PROJ_DIR.exists(): proj_dir = INTERNAL_PROJ_DIR print(f"Internally compiled directory being used {INTERNAL_PROJ_DIR}.") elif proj_dir is None and not INTERNAL_PROJ_DIR.exists(): proj = shutil.which("proj", path=sys.prefix) if proj is None: proj = shutil.which("proj") if proj is None: raise SystemExit( "proj executable not found. Please set the PROJ_DIR variable. " "For more information see: " "https://pyproj4.github.io/pyproj/stable/installation.html" ) proj_dir = Path(proj).parent.parent elif proj_dir is not None and proj_dir.exists(): print("PROJ_DIR is set, using existing PROJ installation..\n") else: raise SystemExit(f"ERROR: Invalid path for PROJ_DIR {proj_dir}") return proj_dir def get_proj_libdirs(proj_dir: Path) -> list[str]: """ This function finds the library directories """ proj_libdir = os.environ.get("PROJ_LIBDIR") libdirs = [] if proj_libdir is None: libdir_search_paths = (proj_dir / "lib", proj_dir / "lib64") for libdir_search_path in libdir_search_paths: if libdir_search_path.exists(): libdirs.append(str(libdir_search_path)) if not libdirs: raise SystemExit( "ERROR: PROJ_LIBDIR dir not found. Please set PROJ_LIBDIR." ) else: libdirs.append(proj_libdir) return libdirs def get_proj_incdirs(proj_dir: Path) -> list[str]: """ This function finds the include directories """ proj_incdir = os.environ.get("PROJ_INCDIR") incdirs = [] if proj_incdir is None: if (proj_dir / "include").exists(): incdirs.append(str(proj_dir / "include")) else: raise SystemExit( "ERROR: PROJ_INCDIR dir not found. Please set PROJ_INCDIR." ) else: incdirs.append(proj_incdir) return incdirs def get_cythonize_options(): """ This function gets the options to cythonize with """ # Configure optional Cython coverage. cythonize_options = { "language_level": sys.version_info[0], "compiler_directives": { "c_string_type": "str", "c_string_encoding": "utf-8", "embedsignature": True, }, } if os.environ.get("PYPROJ_FULL_COVERAGE"): cythonize_options["compiler_directives"].update(linetrace=True) cythonize_options["annotate"] = True return cythonize_options def get_libraries(libdirs: list[str]) -> list[str]: """ This function gets the libraries to cythonize with """ libraries = ["proj"] if os.name == "nt": for libdir in libdirs: projlib = list(Path(libdir).glob("proj*.lib")) if projlib: libraries = [str(projlib[0].stem)] break return libraries def get_extension_modules(): """ This function retrieves the extension modules """ if "clean" in sys.argv: return None # make sure cython is available try: from Cython.Build import cythonize except ImportError as error: raise SystemExit( "ERROR: Cython.Build.cythonize not found. " "Cython is required to build pyproj." 
) from error # By default we'll try to get options from PROJ_DIR or the local version of proj proj_dir = get_proj_dir() library_dirs = get_proj_libdirs(proj_dir) include_dirs = get_proj_incdirs(proj_dir) proj_version = get_proj_version(proj_dir) check_proj_version(proj_version) proj_version_major, proj_version_minor, proj_version_patch = proj_version # setup extension options ext_options = { "include_dirs": include_dirs, "library_dirs": library_dirs, "runtime_library_dirs": ( library_dirs if os.name != "nt" and sys.platform != "cygwin" else None ), "libraries": get_libraries(library_dirs), } # setup cythonized modules return cythonize( [ Extension("pyproj._geod", ["pyproj/_geod.pyx"], **ext_options), Extension("pyproj._crs", ["pyproj/_crs.pyx"], **ext_options), Extension( "pyproj._transformer", ["pyproj/_transformer.pyx"], **ext_options ), Extension("pyproj._compat", ["pyproj/_compat.pyx"], **ext_options), Extension("pyproj.database", ["pyproj/database.pyx"], **ext_options), Extension("pyproj._context", ["pyproj/_context.pyx"], **ext_options), Extension("pyproj.list", ["pyproj/list.pyx"], **ext_options), Extension("pyproj._network", ["pyproj/_network.pyx"], **ext_options), Extension("pyproj._sync", ["pyproj/_sync.pyx"], **ext_options), Extension("pyproj._version", ["pyproj/_version.pyx"], **ext_options), ], quiet=True, compile_time_env={ "CTE_PROJ_VERSION_MAJOR": proj_version_major, "CTE_PROJ_VERSION_MINOR": proj_version_minor, "CTE_PROJ_VERSION_PATCH": proj_version_patch, "CTE_PYTHON_IMPLEMENTATION": platform.python_implementation(), }, **get_cythonize_options(), ) def get_package_data() -> dict[str, list[str]]: """ This function retrieves the package data """ # setup package data package_data = {"pyproj": ["*.pyi", "py.typed"]} if os.environ.get("PROJ_WHEEL") is not None and INTERNAL_PROJ_DIR.exists(): package_data["pyproj"].append( str(BASE_INTERNAL_PROJ_DIR / "share" / "proj" / "*") ) if ( os.environ.get("PROJ_WHEEL") is not None and (CURRENT_FILE_PATH / "pyproj" / ".lib").exists() ): package_data["pyproj"].append(os.path.join(".lib", "*")) return package_data # static items in pyproject.toml setup( ext_modules=get_extension_modules(), package_data=get_package_data(), # temporary hack to add in metadata url="https://github.com/pyproj4/pyproj", ) pyproj-3.7.1/test/000077500000000000000000000000001475425760300140415ustar00rootroot00000000000000pyproj-3.7.1/test/__init__.py000066400000000000000000000000001475425760300161400ustar00rootroot00000000000000pyproj-3.7.1/test/conftest.py000066400000000000000000000056101475425760300162420ustar00rootroot00000000000000import os import pickle from contextlib import contextmanager from pathlib import Path import numpy import pytest from packaging import version import pyproj from pyproj.datadir import get_data_dir, get_user_data_dir, set_data_dir _NETWORK_ENABLED = pyproj.network.is_network_enabled() PROJ_LOOSE_VERSION = version.parse(pyproj.__proj_version__) PROJ_GTE_921 = PROJ_LOOSE_VERSION >= version.parse("9.2.1") PROJ_GTE_93 = PROJ_LOOSE_VERSION >= version.parse("9.3.0") PROJ_GTE_941 = PROJ_LOOSE_VERSION >= version.parse("9.4.1") PROJ_GTE_95 = PROJ_LOOSE_VERSION >= version.parse("9.5.0") def unset_data_dir(): pyproj.datadir._USER_PROJ_DATA = None pyproj.datadir._VALIDATED_PROJ_DATA = None @contextmanager def proj_network_env(): """ Ensure global context network settings reset """ try: yield finally: pyproj.network.set_network_enabled(_NETWORK_ENABLED) @contextmanager def proj_env(): """ Ensure environment variables are the same at the end of the test.
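    Usage sketch (the data directory below is a hypothetical placeholder)::

        with proj_env():
            set_data_dir("/custom/proj/data")
            ...  # test code that relies on the custom PROJ data directory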
""" unset_data_dir() try: yield finally: # make sure the data dir is cleared unset_data_dir() # reset back to the original path set_data_dir(get_data_dir()) @contextmanager def tmp_chdir(new_dir): """ This temporarily changes directories when running the tests. Useful for when testing wheels in the pyproj directory when pyproj has not been build and prevents conflicts. """ curdir = os.getcwd() try: os.chdir(new_dir) yield finally: os.chdir(curdir) def grids_available(*grid_names, check_network=True, check_all=False): """ Check if the grids are available """ if check_network and pyproj.network.is_network_enabled(): return True available = [ ( Path(get_data_dir(), grid_name).exists() or Path(get_user_data_dir(), grid_name).exists() ) for grid_name in grid_names ] if check_all: return all(available) return any(available) def assert_can_pickle(raw_obj, tmp_path): file_path = tmp_path / "temporary.pickle" with open(file_path, "wb") as f: pickle.dump(raw_obj, f) with open(file_path, "rb") as f: unpickled = pickle.load(f) assert raw_obj == unpickled def _make_1_element_array(data: float): """ Turn the float into a 1-element array """ return numpy.array([data]) def _make_2_element_array(data: float): """ Turn the float into a 2-element array """ return numpy.array([data] * 2) @pytest.fixture( params=[ float, numpy.array, _make_1_element_array, _make_2_element_array, ] ) def scalar_and_array(request): """ Ensure cython methods are tested with scalar and arrays to trigger point optimized functions as well as the main functions supporting arrays. """ return request.param pyproj-3.7.1/test/crs/000077500000000000000000000000001475425760300146305ustar00rootroot00000000000000pyproj-3.7.1/test/crs/test_crs.py000066400000000000000000001556141475425760300170440ustar00rootroot00000000000000import concurrent.futures import json import platform from unittest.mock import patch import numpy import pytest import pyproj from pyproj import CRS from pyproj._crs import AuthorityMatchInfo from pyproj.crs import ( CoordinateOperation, CoordinateSystem, Datum, Ellipsoid, PrimeMeridian, ) from pyproj.crs.enums import CoordinateOperationType, DatumType from pyproj.enums import ProjVersion, WktVersion from pyproj.exceptions import CRSError from pyproj.transformer import TransformerGroup from test.conftest import PROJ_GTE_921, PROJ_GTE_941, assert_can_pickle, grids_available class CustomCRS: def to_wkt(self): return CRS.from_epsg(4326).to_wkt() def test_from_proj4_json(): json_str = '{"proj": "longlat", "ellps": "WGS84", "datum": "WGS84"}' proj = CRS.from_string(json_str) with pytest.warns(UserWarning): assert proj.to_proj4(4) == "+proj=longlat +datum=WGS84 +no_defs +type=crs" assert proj.to_proj4(5) == "+proj=longlat +datum=WGS84 +no_defs +type=crs" # Test with invalid JSON code with pytest.raises(CRSError): assert CRS.from_string("{foo: bar}") def test_from_proj4(): proj = CRS.from_proj4("+proj=longlat +datum=WGS84 +no_defs +type=crs") with pytest.warns(UserWarning): assert proj.to_proj4() == "+proj=longlat +datum=WGS84 +no_defs +type=crs" def test_from_proj4__invalid(): # Test with invalid JSON code with pytest.raises(CRSError): assert CRS.from_proj4(CRS(3857).to_wkt()) def test_from_epsg(): proj = CRS.from_epsg(4326) assert proj.to_epsg() == 4326 # Test with invalid EPSG code with pytest.raises(CRSError): assert CRS.from_epsg(0) def test_from_epsg_string(): proj = CRS.from_string("epsg:4326") assert proj.to_epsg() == 4326 # Test with invalid EPSG code with pytest.raises(CRSError): assert CRS.from_string("epsg:xyz") def 
test_from_epsg_int_like_string(): proj = CRS.from_string("4326") assert proj.to_epsg() == 4326 # Test with invalid EPSG code with pytest.raises(CRSError): assert CRS.from_string("0") def test_from_string(): wgs84_crs = CRS.from_string("+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs") with pytest.warns(UserWarning): assert wgs84_crs.to_proj4() == "+proj=longlat +datum=WGS84 +no_defs +type=crs" # Make sure this doesn't get handled using the from_epsg() # even though 'epsg' is in the string with pytest.warns(FutureWarning): epsg_init_crs = CRS.from_string("+init=epsg:26911 +units=m +no_defs=True") with pytest.warns(UserWarning): assert ( epsg_init_crs.to_proj4() == "+proj=utm +zone=11 +datum=NAD83 +units=m +no_defs +type=crs" ) def test_from_numpy(): crs_numpy = numpy.array([4326])[0] proj = CRS.from_user_input(crs_numpy) assert proj.to_epsg() == 4326 # Test with invalid EPSG code with pytest.raises(CRSError): crs_numpy = numpy.array([0])[0] assert CRS.from_epsg(crs_numpy) def test_from_string__invalid(): with pytest.raises(CRSError, match="CRS input is not a string"): CRS.from_string(4326) def test_initialize_projparams_with_kwargs(): crs_mixed_args = CRS("+proj=utm +zone=10", ellps="WGS84") crs_positional = CRS("+proj=utm +zone=10 +ellps=WGS84") assert crs_mixed_args.is_exact_same(crs_positional) def test_bare_parameters(): """Make sure that bare parameters (e.g., no_defs) are handled properly, even if they come in with key=True. This covers interaction with pyproj, which makes presents bare parameters as key=.""" # Example produced by pyproj proj = CRS.from_string( "+proj=lcc +lon_0=-95 +ellps=GRS80 +y_0=0 +no_defs=True " "+x_0=0 +units=m +lat_2=77 +lat_1=49 +lat_0=0" ) with pytest.warns(UserWarning): assert "+no_defs" in proj.to_proj4(4) # TODO: THIS DOES NOT WORK proj = CRS.from_string( "+lon_0=-95 +ellps=GRS80 +proj=lcc +y_0=0 +no_defs=False " "+x_0=0 +units=m +lat_2=77 +lat_1=49 +lat_0=0" ) # assert "+no_defs" not in proj.to_proj4(4) def test_is_geographic(): assert CRS("EPSG:4326").is_geographic is True assert CRS("EPSG:3857").is_geographic is False wgs84_crs = CRS.from_string("+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs") assert wgs84_crs.is_geographic is True nad27_crs = CRS.from_string("+proj=longlat +ellps=clrk66 +datum=NAD27 +no_defs") assert nad27_crs.is_geographic is True lcc_crs = CRS.from_string( "+lon_0=-95 +ellps=GRS80 +y_0=0 +no_defs=True +proj=lcc +x_0=0 " "+units=m +lat_2=77 +lat_1=49 +lat_0=0" ) assert lcc_crs.is_geographic is False def test_is_projected(): assert CRS("EPSG:3857").is_projected is True lcc_crs = CRS.from_string( "+lon_0=-95 +ellps=GRS80 +y_0=0 +no_defs=True +proj=lcc +x_0=0 " "+units=m +lat_2=77 +lat_1=49 +lat_0=0" ) assert CRS.from_user_input(lcc_crs).is_projected is True wgs84_crs = CRS.from_string("+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs") assert CRS.from_user_input(wgs84_crs).is_projected is False def test_is_compound(): assert CRS("EPSG:4326+5773").is_compound assert not CRS("EPSG:4326").is_compound def test_is_same_crs(): crs1 = CRS("urn:ogc:def:crs:OGC::CRS84") crs2 = CRS("EPSG:3857") assert crs1 == crs1 assert crs1 != crs2 wgs84_crs = CRS.from_string("+proj=longlat +ellps=WGS84 +datum=WGS84") assert crs1 == wgs84_crs # Make sure that same projection with different parameter are not equal lcc_crs1 = CRS.from_string( "+lon_0=-95 +ellps=GRS80 +y_0=0 +no_defs=True +proj=lcc " "+x_0=0 +units=m +lat_2=77 +lat_1=49 +lat_0=0" ) lcc_crs2 = CRS.from_string( "+lon_0=-95 +ellps=GRS80 +y_0=0 +no_defs=True +proj=lcc " "+x_0=0 +units=m +lat_2=77 
+lat_1=45 +lat_0=0" ) assert lcc_crs1 != lcc_crs2 def test_to_proj4(): with pytest.warns(UserWarning): assert ( CRS("EPSG:4326").to_proj4(4) == "+proj=longlat +datum=WGS84 +no_defs +type=crs" ) def test_empty_json(): with pytest.raises(CRSError): CRS.from_string("{}") with pytest.raises(CRSError): CRS.from_string("[]") with pytest.raises(CRSError): CRS.from_string("") def test_has_wkt_property(): with pytest.warns(FutureWarning): assert ( CRS({"init": "EPSG:4326"}) .to_wkt("WKT1_GDAL") .startswith('GEOGCS["WGS 84",DATUM') ) def test_to_wkt_pretty(): crs = CRS.from_epsg(4326) assert "\n" in crs.to_wkt(pretty=True) assert "\n" not in crs.to_wkt() def test_no_non_deprecated(): crs = CRS.from_epsg(4326) assert not crs.is_deprecated non_dep = crs.get_non_deprecated() assert len(non_dep) == 0 def test_non_deprecated(): crs = CRS.from_epsg(28473) assert crs.is_deprecated non_dep = crs.get_non_deprecated() assert len(non_dep) == 1 assert "EPSG:2503" == ":".join(non_dep[0].to_authority()) def test_non_deprecated_empty(): crs = CRS.from_epsg(3151) assert crs.is_deprecated assert len(crs.get_non_deprecated()) == 0 def test_non_deprecated_multiple(): crs = CRS.from_epsg(3315) assert crs.is_deprecated non_dep = [":".join(el.to_authority()) for el in crs.get_non_deprecated()] assert len(non_dep) == 4 for elem in ["EPSG:3989", "EPSG:3988", "EPSG:3987", "EPSG:3986"]: assert elem in non_dep @pytest.mark.parametrize( "version, expected", [ ("WKT1_GDAL", False), ("WKT1_ESRI", False), ("WKT2_2019", True), ], ) def test_to_wkt_with_axis_rule_4326(version, expected): crs = CRS.from_epsg(4326) axis = "AXIS" assert (axis in crs.to_wkt(version)) == expected assert (axis in crs.to_wkt(version, output_axis_rule=None)) == expected assert axis in crs.to_wkt(version, output_axis_rule=True) assert axis not in crs.to_wkt(version, output_axis_rule=False) @pytest.mark.parametrize( "version, expected", [ ("WKT1_GDAL", True), ("WKT1_ESRI", False), ("WKT2_2019", True), ], ) def test_to_wkt_with_axis_rule_32630(version, expected): crs = CRS.from_epsg(32630) axis = "AXIS" assert (axis in crs.to_wkt(version)) == expected assert (axis in crs.to_wkt(version, output_axis_rule=None)) == expected assert axis in crs.to_wkt(version, output_axis_rule=True) assert axis not in crs.to_wkt(version, output_axis_rule=False) def test_repr(): with pytest.warns(FutureWarning): assert repr(CRS({"init": "EPSG:4326"})) == ( "\n" "Name: WGS 84\n" "Axis Info [ellipsoidal]:\n" "- lon[east]: Longitude (degree)\n" "- lat[north]: Latitude (degree)\n" "Area of Use:\n" "- name: World.\n" "- bounds: (-180.0, -90.0, 180.0, 90.0)\n" "Datum: World Geodetic System 1984 ensemble\n" "- Ellipsoid: WGS 84\n" "- Prime Meridian: Greenwich\n" ) def test_repr__long(): with pytest.warns(FutureWarning): wkt_str = 'GEOGCRS["WGS 84",ENSEMBLE["World Geodetic System 1' assert repr(CRS(CRS({"init": "EPSG:4326"}).to_wkt())) == ( f"\n" "Name: WGS 84\n" "Axis Info [ellipsoidal]:\n" "- lon[east]: Longitude (degree)\n" "- lat[north]: Latitude (degree)\n" "Area of Use:\n" "- name: World.\n" "- bounds: (-180.0, -90.0, 180.0, 90.0)\n" "Datum: World Geodetic System 1984 ensemble\n" "- Ellipsoid: WGS 84\n" "- Prime Meridian: Greenwich\n" ) def test_repr_epsg(): assert repr(CRS(CRS("EPSG:4326").to_wkt())) == ( "\n" "Name: WGS 84\n" "Axis Info [ellipsoidal]:\n" "- Lat[north]: Geodetic latitude (degree)\n" "- Lon[east]: Geodetic longitude (degree)\n" "Area of Use:\n" "- name: World.\n" "- bounds: (-180.0, -90.0, 180.0, 90.0)\n" "Datum: World Geodetic System 1984 ensemble\n" "- Ellipsoid: 
WGS 84\n" "- Prime Meridian: Greenwich\n" ) def test_repr__undefined(): assert repr( CRS( "+proj=merc +a=6378137.0 +b=6378137.0 +nadgrids=@null" " +lon_0=0.0 +x_0=0.0 +y_0=0.0 +units=m +no_defs" ) ) == ( "\n" "Name: unknown\n" "Axis Info [cartesian]:\n" "- E[east]: Easting (metre)\n" "- N[north]: Northing (metre)\n" "Area of Use:\n" "- undefined\n" "Coordinate Operation:\n" "- name: unknown to WGS84\n" "- method: NTv2\n" "Datum: unknown using nadgrids=@null\n" "- Ellipsoid: unknown\n" "- Prime Meridian: Greenwich\n" "Source CRS: unknown\n" ) def test_repr_compound(): assert repr(CRS.from_epsg(3901)) == ( "\n" "Name: KKJ / Finland Uniform Coordinate System + N60 height\n" "Axis Info [cartesian|vertical]:\n" "- X[north]: Northing (metre)\n" "- Y[east]: Easting (metre)\n" "- H[up]: Gravity-related height (metre)\n" "Area of Use:\n" "- name: Finland - onshore.\n" "- bounds: (19.24, 59.75, 31.59, 70.09)\n" "Datum: Kartastokoordinaattijarjestelma (1966)\n" "- Ellipsoid: International 1924\n" "- Prime Meridian: Greenwich\n" "Sub CRS:\n" "- KKJ / Finland Uniform Coordinate System\n" "- N60 height\n" ) def test_axis_info_compound(): assert [axis.direction for axis in CRS.from_epsg(3901).axis_info] == [ "north", "east", "up", ] def test_dunder_str(): with pytest.warns(FutureWarning): assert str(CRS({"init": "EPSG:4326"})) == CRS({"init": "EPSG:4326"}).srs def test_epsg(): with pytest.warns(FutureWarning): assert CRS({"init": "EPSG:4326"}).to_epsg(20) == 4326 assert CRS({"init": "EPSG:4326"}).to_epsg() is None assert CRS.from_user_input(4326).to_epsg() == 4326 assert CRS.from_epsg(4326).to_epsg() == 4326 assert CRS.from_user_input("epsg:4326").to_epsg() == 4326 def test_datum(): datum = CRS.from_epsg(4326).datum assert "\n" in repr(datum) datum_wkt = 'ENSEMBLE["World Geodetic System 1984 ensemble"' assert repr(datum).startswith(datum_wkt) assert datum.to_wkt().startswith(datum_wkt) assert datum == datum assert datum.is_exact_same(datum) def test_datum_horizontal(): assert CRS.from_epsg(5972).datum == CRS.from_epsg(25832).datum def test_datum_unknown(): crs = CRS( "+proj=omerc +lat_0=-36.10360962430914 " "+lonc=147.06322917270154 +alpha=-54.786229796129035 " "+k=1 +x_0=0 +y_0=0 +gamma=0 +ellps=WGS84 " "+towgs84=0,0,0,0,0,0,0 +units=m +no_defs" ) datum_name = "Unknown based on WGS84 ellipsoid" if PROJ_GTE_921: datum_name = "Unknown based on WGS 84 ellipsoid" assert crs.datum.name == f"{datum_name} using towgs84=0,0,0,0,0,0,0" def test_epsg__not_found(): assert CRS("+proj=longlat +datum=WGS84 +no_defs +towgs84=0,0,0").to_epsg(0) is None assert ( CRS.from_string("+proj=longlat +datum=WGS84 +no_defs +towgs84=0,0,0").to_epsg() is None ) def test_epsg__no_code_available(): lcc_crs = CRS.from_string( "+lon_0=-95 +ellps=GRS80 +y_0=0 +no_defs=True +proj=lcc " "+x_0=0 +units=m +lat_2=77 +lat_1=49 +lat_0=0" ) assert lcc_crs.to_epsg() is None def test_crs_OSR_equivalence(): crs1 = CRS.from_string("+proj=longlat +datum=WGS84 +no_defs") crs2 = CRS.from_string("+proj=latlong +datum=WGS84 +no_defs") with pytest.warns(FutureWarning): crs3 = CRS({"init": "EPSG:4326"}) assert crs1 == crs2 # these are not equivalent in proj.4 now as one uses degrees and the other radians assert crs1 == crs3 def test_crs_OSR_no_equivalence(): crs1 = CRS.from_string("+proj=longlat +datum=WGS84 +no_defs") crs2 = CRS.from_string("+proj=longlat +datum=NAD27 +no_defs") assert crs1 != crs2 def test_init_from_wkt(): wgs84 = CRS.from_string("+proj=longlat +datum=WGS84 +no_defs") from_wkt = CRS(wgs84.to_wkt()) assert wgs84.to_wkt() == 
from_wkt.to_wkt() def test_init_from_wkt_invalid(): with pytest.raises(CRSError): CRS("trash-54322") with pytest.raises(CRSError): CRS("") def test_from_wkt(): wgs84 = CRS.from_string("+proj=longlat +datum=WGS84 +no_defs") from_wkt = CRS.from_wkt(wgs84.to_wkt()) assert wgs84.to_wkt() == from_wkt.to_wkt() def test_from_wkt_invalid(): with pytest.raises(CRSError), pytest.warns(UserWarning): CRS.from_wkt(CRS(4326).to_proj4()) def test_from_user_input_epsg(): with pytest.warns(UserWarning): assert "+proj=longlat" in CRS.from_user_input("EPSG:4326").to_proj4(4) def test_from_esri_wkt(): projection_string = ( 'PROJCS["USA_Contiguous_Albers_Equal_Area_Conic_USGS_version",' 'GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",' 'SPHEROID["GRS_1980",6378137.0,298.257222101]],' 'PRIMEM["Greenwich",0.0],' 'UNIT["Degree",0.0174532925199433]],' 'PROJECTION["Albers"],' 'PARAMETER["false_easting",0.0],' 'PARAMETER["false_northing",0.0],' 'PARAMETER["central_meridian",-96.0],' 'PARAMETER["standard_parallel_1",29.5],' 'PARAMETER["standard_parallel_2",45.5],' 'PARAMETER["latitude_of_origin",23.0],' 'UNIT["Meter",1.0],' 'VERTCS["NAVD_1988",' 'VDATUM["North_American_Vertical_Datum_1988"],' 'PARAMETER["Vertical_Shift",0.0],' 'PARAMETER["Direction",1.0],UNIT["Centimeter",0.01]]]' ) proj_crs_str = CRS.from_string(projection_string) proj_crs_wkt = CRS(projection_string) with pytest.warns(UserWarning): assert proj_crs_str.to_proj4() == proj_crs_wkt.to_proj4() assert proj_crs_str.to_proj4(4) == ( "+proj=aea +lat_0=23 +lon_0=-96 +lat_1=29.5 " "+lat_2=45.5 +x_0=0 +y_0=0 +datum=NAD83 +units=m +no_defs +type=crs" ) def test_compound_crs(): wkt = """COMPD_CS["unknown",GEOGCS["WGS 84",DATUM["WGS_1984", SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]], TOWGS84[0,0,0,0,0,0,0],AUTHORITY["EPSG","6326"]], PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433], AUTHORITY["EPSG","4326"]],VERT_CS["unknown", VERT_DATUM["unknown",2005],UNIT["metre",1.0, AUTHORITY["EPSG","9001"]],AXIS["Up",UP]]]""" assert CRS(wkt).to_wkt("WKT1_GDAL").startswith('COMPD_CS["unknown",GEOGCS["WGS 84"') def test_ellipsoid(): crs1 = CRS.from_epsg(4326) assert f"{crs1.ellipsoid.inverse_flattening:.3f}" == "298.257" assert f"{crs1.ellipsoid.semi_major_metre:.3f}" == "6378137.000" assert f"{crs1.ellipsoid.semi_minor_metre:.3f}" == "6356752.314" def test_ellipsoid__semi_minor_not_computed(): cc = CRS("+proj=geos +lon_0=-89.5 +a=6378137.0 +b=6356752.31 h=12345") assert cc.datum.ellipsoid.semi_minor_metre == 6356752.31 assert cc.datum.ellipsoid.semi_major_metre == 6378137.0 assert not cc.datum.ellipsoid.is_semi_minor_computed def test_area_of_use(): crs1 = CRS.from_epsg(4326) assert crs1.area_of_use.bounds == (-180.0, -90.0, 180.0, 90.0) assert crs1.area_of_use.name == "World." 
def test_from_user_input_custom_crs_class(): assert CRS.from_user_input(CustomCRS()) == CRS.from_epsg(4326) def test_non_crs_error(): with pytest.raises(CRSError, match="Input is not a CRS"): CRS( "+proj=pipeline +ellps=GRS80 +step +proj=merc " "+step +proj=axisswap +order=2,1" ) def test_sub_crs(): crs = CRS.from_epsg(5972) sub_crs_list = crs.sub_crs_list assert len(sub_crs_list) == 2 assert sub_crs_list[0] == CRS.from_epsg(25832) assert sub_crs_list[1] == CRS.from_epsg(5941) assert crs.is_projected assert crs.is_vertical assert not crs.is_geographic def test_sub_crs__none(): assert CRS.from_epsg(4326).sub_crs_list == [] def test_coordinate_system(): crs = CRS.from_epsg(26915) assert repr(crs.coordinate_system).startswith("CS[Cartesian") assert crs.coordinate_system.name == "cartesian" assert crs.coordinate_system.name == str(crs.coordinate_system) assert crs.coordinate_system.axis_list == crs.axis_info assert len(crs.coordinate_system.axis_list) == 2 def test_coordinate_system_geog(): crs = CRS.from_epsg(4326) assert repr(crs.coordinate_system).startswith("CS[ellipsoidal") assert crs.coordinate_system.name == "ellipsoidal" assert crs.coordinate_system.name == str(crs.coordinate_system) assert crs.coordinate_system.axis_list == crs.axis_info assert repr(crs.coordinate_system.axis_list) == ( "[Axis(name=Geodetic latitude, abbrev=Lat, direction=north, " "unit_auth_code=EPSG, unit_code=9122, unit_name=degree), " "Axis(name=Geodetic longitude, abbrev=Lon, direction=east, " "unit_auth_code=EPSG, unit_code=9122, unit_name=degree)]" ) def test_coordinate_operation(): crs = CRS.from_epsg(26915) assert repr(crs.coordinate_operation) == ( "\n" "Name: UTM zone 15N\n" "Method: Transverse Mercator\n" "Area of Use:\n" "- name: Between 96°W and 90°W, northern hemisphere between equator and 84°N, " "onshore and offshore.\n" "- bounds: (-96.0, 0.0, -90.0, 84.0)" ) assert crs.coordinate_operation.method_name == "Transverse Mercator" assert crs.coordinate_operation.name == str(crs.coordinate_operation) assert crs.coordinate_operation.method_auth_name == "EPSG" assert crs.coordinate_operation.method_code == "9807" assert crs.coordinate_operation.is_instantiable == 1 assert crs.coordinate_operation.has_ballpark_transformation == 0 assert crs.coordinate_operation.accuracy == -1.0 assert repr(crs.coordinate_operation.params) == ( "[Param(name=Latitude of natural origin, auth_name=EPSG, code=8801, " "value=0.0, unit_name=degree, unit_auth_name=EPSG, " "unit_code=9102, unit_category=angular), " "Param(name=Longitude of natural origin, auth_name=EPSG, code=8802, " "value=-93.0, unit_name=degree, unit_auth_name=EPSG, " "unit_code=9102, unit_category=angular), " "Param(name=Scale factor at natural origin, auth_name=EPSG, code=8805, " "value=0.9996, unit_name=unity, unit_auth_name=EPSG, " "unit_code=9201, unit_category=scale), " "Param(name=False easting, auth_name=EPSG, code=8806, value=500000.0, " "unit_name=metre, unit_auth_name=EPSG, unit_code=9001, unit_category=linear), " "Param(name=False northing, auth_name=EPSG, code=8807, value=0.0, " "unit_name=metre, unit_auth_name=EPSG, unit_code=9001, unit_category=linear)]" ) assert crs.coordinate_operation.grids == [] def test_coordinate_operation_grids(): cc = CoordinateOperation.from_epsg(1312) if not cc.grids[0].full_name: assert ( repr(cc.grids) == "[Grid(short_name=NTv1_0.gsb, full_name=, package_name=, url=, " "direct_download=False, open_license=False, available=False)]" ) else: assert ( repr(cc.grids) == "[Grid(short_name=NTv1_0.gsb, full_name=NTv1_0.gsb, 
package_name=, " "url=, direct_download=False, open_license=False, available=False)]" ) @pytest.mark.grid def test_coordinate_operation_grids__alternative_grid_name(): cc = CoordinateOperation.from_epsg(1312, True) assert len(cc.grids) == 1 grid = cc.grids[0] assert grid.direct_download is True assert grid.open_license is True assert grid.short_name == "ca_nrc_ntv1_can.tif" assert grid.package_name == "" assert grid.url == "https://cdn.proj.org/ca_nrc_ntv1_can.tif" if grids_available(grid.short_name, check_network=False): assert grid.available is True assert grid.full_name.endswith(grid.short_name) elif pyproj.network.is_network_enabled(): assert grid.available is True if PROJ_GTE_941: assert grid.full_name == "" else: assert grid.full_name == grid.url else: assert grid.available is False assert grid.full_name == "" def test_coordinate_operation__missing(): crs = CRS.from_epsg(4326) assert crs.coordinate_operation is None def test_coordinate_operation__from_epsg(): cc = CoordinateOperation.from_epsg(16031) assert cc.method_auth_name == "EPSG" assert cc.method_code == "9807" def test_coordinate_operation__from_authority(): cc = CoordinateOperation.from_authority("EPSG", 16031) assert cc.method_auth_name == "EPSG" assert cc.method_code == "9807" @pytest.mark.parametrize( "user_input", [ 1671, ("EPSG", 1671), "urn:ogc:def:coordinateOperation:EPSG::1671", CoordinateOperation.from_epsg(1671), CoordinateOperation.from_epsg(1671).to_json_dict(), "RGF93 v1 to WGS 84 (1)", ], ) def test_coordinate_operation__from_user_input(user_input): assert CoordinateOperation.from_user_input( user_input ) == CoordinateOperation.from_epsg(1671) def test_coordinate_operation__from_user_input__invalid(): with pytest.raises(CRSError, match="Invalid coordinate operation"): CoordinateOperation.from_user_input({}) def test_coordinate_operation__from_epsg__empty(): with pytest.raises(CRSError, match="Invalid authority"): CoordinateOperation.from_epsg(1) def test_coordinate_operation__from_authority__empty(): with pytest.raises(CRSError, match="Invalid authority"): CoordinateOperation.from_authority("BOB", 4326) def test_datum__from_epsg(): datum_wkt_prefix = ( 'ENSEMBLE["World Geodetic System 1984 ensemble",' 'MEMBER["World Geodetic System 1984 (Transit)",ID["EPSG",1166]],' 'MEMBER["World Geodetic System 1984 (G730)",ID["EPSG",1152]],' 'MEMBER["World Geodetic System 1984 (G873)",ID["EPSG",1153]],' 'MEMBER["World Geodetic System 1984 (G1150)",ID["EPSG",1154]],' 'MEMBER["World Geodetic System 1984 (G1674)",ID["EPSG",1155]],' 'MEMBER["World Geodetic System 1984 (G1762)",ID["EPSG",1156]],' 'MEMBER["World Geodetic System 1984 (G2139)",ID["EPSG",1309]],' ) datum_wkt_suffix = ( 'ELLIPSOID["WGS 84",6378137,298.257223563,LENGTHUNIT["metre",1],' 'ID["EPSG",7030]],ENSEMBLEACCURACY[2.0],ID["EPSG",6326]]' ) # Testing this way avoids problems when new members are added to the datum ensemble assert Datum.from_epsg("6326").to_wkt().startswith(datum_wkt_prefix) assert Datum.from_epsg("6326").to_wkt().endswith(datum_wkt_suffix) def test_datum__from_authority(): dt = Datum.from_authority("EPSG", 6326) assert dt.name == "World Geodetic System 1984 ensemble" def test_datum__from_epsg__invalid(): with pytest.raises(CRSError, match="Invalid authority"): Datum.from_epsg(1) def test_datum__from_authority__invalid(): with pytest.raises(CRSError, match="Invalid authority"): Datum.from_authority("BOB", 1) @pytest.mark.parametrize( "user_input", [ 6326, ("EPSG", 6326), "urn:ogc:def:ensemble:EPSG::6326", Datum.from_epsg(6326), 
Datum.from_epsg(6326).to_json_dict(), "World Geodetic System 1984", ], ) def test_datum__from_user_input(user_input): assert Datum.from_user_input(user_input) == Datum.from_epsg(6326) def test_datum__from_user_input__invalid(): with pytest.raises(CRSError, match="Invalid datum"): Datum.from_user_input({}) def test_prime_meridian__from_epsg(): assert PrimeMeridian.from_epsg(8903).to_wkt() == ( 'PRIMEM["Paris",2.5969213,ANGLEUNIT["grad",0.0157079632679489],ID["EPSG",8903]]' ) def test_prime_meridian__from_authority(): assert PrimeMeridian.from_authority("EPSG", 8903).name == "Paris" def test_prime_meridian__from_epsg__invalid(): with pytest.raises(CRSError, match="Invalid authority"): PrimeMeridian.from_epsg(1) def test_prime_meridian__from_authority__invalid(): with pytest.raises(CRSError, match="Invalid authority"): PrimeMeridian.from_authority("Bob", 1) @pytest.mark.parametrize( "user_input", [ 8901, ("EPSG", 8901), "urn:ogc:def:meridian:EPSG::8901", PrimeMeridian.from_epsg(8901), PrimeMeridian.from_epsg(8901).to_json_dict(), "Greenwich", ], ) def test_prime_meridian__from_user_input(user_input): assert PrimeMeridian.from_user_input(user_input) == PrimeMeridian.from_epsg(8901) def test_prime_meridian__from_user_input__invalid(): with pytest.raises(CRSError, match="Invalid prime meridian"): PrimeMeridian.from_user_input({}) def test_ellipsoid__from_epsg(): assert Ellipsoid.from_epsg(7030).to_wkt() == ( 'ELLIPSOID["WGS 84",6378137,298.257223563,' 'LENGTHUNIT["metre",1],ID["EPSG",7030]]' ) def test_ellipsoid__from_authority(): assert Ellipsoid.from_authority("EPSG", 7030).name == "WGS 84" def test_ellipsoid__from_epsg__invalid(): with pytest.raises(CRSError, match="Invalid authority"): Ellipsoid.from_epsg(1) def test_ellipsoid__from_authority__invalid(): with pytest.raises(CRSError, match="Invalid authority"): Ellipsoid.from_authority("BOB", 1) @pytest.mark.parametrize( "user_input", [ 7001, ("EPSG", 7001), "urn:ogc:def:ellipsoid:EPSG::7001", Ellipsoid.from_epsg(7001), Ellipsoid.from_epsg(7001).to_json_dict(), "Airy 1830", ], ) def test_ellipsoid__from_user_input(user_input): assert Ellipsoid.from_user_input(user_input) == Ellipsoid.from_epsg(7001) def test_ellipsoid__from_user_input__invalid(): with pytest.raises(CRSError, match="Invalid ellipsoid"): Ellipsoid.from_user_input({}) CS_JSON_DICT = { "$schema": "https://proj.org/schemas/v0.2/projjson.schema.json", "type": "CoordinateSystem", "subtype": "Cartesian", "axis": [ {"name": "Easting", "abbreviation": "E", "direction": "east", "unit": "metre"}, { "name": "Northing", "abbreviation": "N", "direction": "north", "unit": "metre", }, ], } @pytest.mark.parametrize( "user_input", [ CS_JSON_DICT, json.dumps(CS_JSON_DICT), CoordinateSystem.from_json_dict(CS_JSON_DICT), ], ) def test_coordinate_system__from_user_input(user_input): assert CoordinateSystem.from_user_input( user_input ) == CoordinateSystem.from_json_dict(CS_JSON_DICT) @pytest.mark.parametrize( "user_input", [ 7001, ("EPSG", 7001), "urn:ogc:def:ellipsoid:EPSG::7001", Ellipsoid.from_epsg(7001), Ellipsoid.from_epsg(7001).to_json_dict(), ], ) def test_coordinate_system__from_user_input__invalid(user_input): with pytest.raises(CRSError, match="Invalid"): CoordinateSystem.from_user_input(user_input) def test_bound_crs_is_geographic(): assert CRS( "proj=longlat datum=WGS84 no_defs ellps=WGS84 towgs84=0,0,0" ).is_geographic def test_coordinate_operation_towgs84_three(): crs = CRS("+proj=latlong +ellps=GRS80 +towgs84=-199.87,74.79,246.62") assert crs.coordinate_operation.towgs84 == [-199.87, 
74.79, 246.62] def test_coordinate_operation_towgs84_seven(): crs = CRS( "+proj=tmerc +lat_0=0 +lon_0=15 +k=0.9996 +x_0=2520000 +y_0=0 " "+ellps=intl +towgs84=-122.74,-34.27,-22.83,-1.884,-3.400,-3.030,-15.62" ) assert crs.coordinate_operation.towgs84 == [ -122.74, -34.27, -22.83, -1.884, -3.4, -3.03, -15.62, ] def test_axis_info_bound(): crs = CRS( "+proj=tmerc +lat_0=0 +lon_0=15 +k=0.9996 +x_0=2520000 +y_0=0 " "+ellps=intl +towgs84=-122.74,-34.27,-22.83,-1.884,-3.400,-3.030,-15.62" ) assert [axis.direction for axis in crs.axis_info] == ["east", "north"] def test_coordinate_operation_towgs84_missing(): crs = CRS("epsg:3004") assert crs.coordinate_operation.towgs84 == [] @pytest.mark.parametrize( "wkt_version_str, wkt_version_enum", [ ("WKT1_GDAL", WktVersion.WKT1_GDAL), ("WKT2_2018", WktVersion.WKT2_2018), ("WKT2_2018_SIMPLIFIED", WktVersion.WKT2_2018_SIMPLIFIED), ("WKT2_2019", WktVersion.WKT2_2019), ("WKT2_2019_SIMPLIFIED", WktVersion.WKT2_2019_SIMPLIFIED), ("WKT2_2015", WktVersion.WKT2_2015), ("WKT2_2015_SIMPLIFIED", WktVersion.WKT2_2015_SIMPLIFIED), ], ) def test_to_wkt_enum(wkt_version_str, wkt_version_enum): crs = CRS.from_epsg(4326) assert crs.to_wkt(wkt_version_str) == crs.to_wkt(wkt_version_enum) def test_to_wkt_enum__invalid(): crs = CRS.from_epsg(4326) with pytest.raises(ValueError, match="Invalid value"): crs.to_wkt("WKT_INVALID") @pytest.mark.parametrize( "wkt_version", ["WKT2_2015", "WKT2_2015_SIMPLIFIED", "WKT1_GDAL", "WKT1_ESRI"], ) def test_to_wkt_none_warning(wkt_version): wkt_string = ( 'PROJCRS["unknown",BASEGEOGCRS["unknown",DATUM["unknown",' 'ELLIPSOID["WGS 84",6378137,298.257223563,LENGTHUNIT["metre",1,' 'ID["EPSG",9001]]]],PRIMEM["Greenwich",0,ANGLEUNIT["degree",0.0174532925199],' 'ID["EPSG",8901]]],CONVERSION["unknown",METHOD["Equidistant Cylindrical",' 'ID["EPSG",1028]],PARAMETER["Latitude of 1st standard parallel",0,' 'ANGLEUNIT["degree",0.0174532925199433],ID["EPSG",8823]],' 'PARAMETER["Longitude of natural origin",0,ANGLEUNIT["degree",0.0174532925199],' 'ID["EPSG",8802]],PARAMETER["False easting",0,' 'LENGTHUNIT["unknown",111319.490793274],ID["EPSG",8806]],' 'PARAMETER["False northing",0,LENGTHUNIT["unknown",111319.490793274],' 'ID["EPSG",8807]]],CS[Cartesian,3],AXIS["(E)",east,ORDER[1],' 'LENGTHUNIT["unknown",111319.490793274]],AXIS["(N)",north,ORDER[2],' 'LENGTHUNIT["unknown",111319.490793274]],AXIS["ellipsoidal height (h)",up,' 'ORDER[3],LENGTHUNIT["metre",1,ID["EPSG",9001]]]]' ) crs = CRS.from_wkt(wkt_string) with pytest.raises(CRSError, match="CRS cannot be converted to a WKT string"): assert crs.to_wkt(version=wkt_version) is None def test_to_proj4_none_warning(): crs = CRS("EPSG:4326") with ( patch("pyproj.crs.crs.CRS._crs") as crs_mock, pytest.raises(CRSError, match="CRS cannot be converted to a PROJ string"), ): crs_mock.to_proj4.return_value = None assert crs.to_proj4() is None def test_to_json_none_warning(): crs = CRS("EPSG:4326") with ( patch("pyproj.crs.crs.CRS._crs") as crs_mock, pytest.raises(CRSError, match="CRS cannot be converted to a PROJ JSON string"), ): crs_mock.to_json.return_value = None assert crs.to_json() is None def test_to_proj4_enum(): crs = CRS.from_epsg(4326) with pytest.warns(UserWarning): assert crs.to_proj4(4) == crs.to_proj4(ProjVersion.PROJ_4) assert crs.to_proj4(5) == crs.to_proj4(ProjVersion.PROJ_5) def test_datum_equals(): datum = Datum.from_epsg(6326) assert datum == 6326 assert not datum != 6326 assert datum != "invalid" @pytest.mark.parametrize( "input_str", [ "urn:ogc:def:ensemble:EPSG::6326", "World Geodetic System 
1984", ], ) def test_datum__from_string(input_str): dd = Datum.from_string(input_str) assert dd.name == "World Geodetic System 1984 ensemble" assert dd.type_name == "Datum Ensemble" @pytest.mark.parametrize( "input_str, type_name", [ ('ENGINEERINGDATUM["Engineering datum"]', "Engineering Datum"), ('PDATUM["Mean Sea Level",ANCHOR["1013.25 hPa at 15°C"]]', "Parametric Datum"), ( 'TDATUM["Gregorian calendar",CALENDAR["proleptic Gregorian"],' "TIMEORIGIN[0000-01-01]]", "Temporal Datum", ), ], ) def test_datum__from_string__type_name(input_str, type_name): dd = Datum.from_string(input_str) assert dd.type_name == type_name @pytest.mark.parametrize( "input_name", ["World Geodetic System 1984", "WGS84", "WGS 84"] ) def test_datum__from_name(input_name): dd = Datum.from_name(input_name) assert dd.name == "World Geodetic System 1984 ensemble" @pytest.mark.parametrize("auth_name", [None, "ESRI"]) def test_datum_from_name__auth_type(auth_name): dd = Datum.from_name( "WGS_1984_Geoid", auth_name=auth_name, datum_type=DatumType.VERTICAL_REFERENCE_FRAME, ) assert dd.name == "WGS_1984_Geoid" assert dd.type_name == "Vertical Reference Frame" def test_datum_from_name__any_type(): dd = Datum.from_name("WGS_1984_Geoid") assert dd.name == "WGS_1984_Geoid" assert dd.type_name == "Vertical Reference Frame" @pytest.mark.parametrize( "invalid_str", ["3-598y5-98y", "urn:ogc:def:ellipsoid:EPSG::7001"] ) def test_datum__from_name__invalid(invalid_str): with pytest.raises(CRSError, match="Invalid datum name:"): Datum.from_name(invalid_str) def test_datum__from_name__invalid_type(): with pytest.raises(CRSError, match="Invalid datum name: WGS84"): Datum.from_name("WGS84", datum_type="VERTICAL_REFERENCE_FRAME") @pytest.mark.parametrize( "invalid_str", ["3-598y5-98y", "urn:ogc:def:ellipsoid:EPSG::7001"] ) def test_datum__from_string__invalid(invalid_str): with pytest.raises(CRSError, match="Invalid datum string"): Datum.from_string(invalid_str) def test_ellipsoid_equals(): ellipsoid = Ellipsoid.from_epsg(7001) assert ellipsoid == 7001 assert not ellipsoid != 7001 assert ellipsoid != "invalid" @pytest.mark.parametrize("input_str", ["urn:ogc:def:ellipsoid:EPSG::7001", "Airy 1830"]) def test_ellipsoid__from_string(input_str): ee = Ellipsoid.from_string(input_str) assert ee.name == "Airy 1830" @pytest.mark.parametrize( "input_str,long_name", [ ("Airy 1830", "Airy 1830"), ("intl", "International 1924 (Hayford 1909, 1910)"), ( "International 1924 (Hayford 1909, 1910)", "International 1924 (Hayford 1909, 1910)", ), ], ) def test_ellipsoid__from_name(input_str, long_name): ee = Ellipsoid.from_name(input_str) assert ee.name == long_name @pytest.mark.parametrize("invalid_str", ["3-598y5-98y", "urn:ogc:def:datum:EPSG::6326"]) def test_ellipsoid__from_name__invalid(invalid_str): with pytest.raises(CRSError, match="Invalid ellipsoid name"): Ellipsoid.from_name(invalid_str) def test_ellipsoid__from_name__invalid__auth(): with pytest.raises(CRSError, match="Invalid ellipsoid name"): Ellipsoid.from_name("intl", auth_name="ESRI") @pytest.mark.parametrize("invalid_str", ["3-598y5-98y", "urn:ogc:def:datum:EPSG::6326"]) def test_ellipsoid__from_string__invalid(invalid_str): with pytest.raises(CRSError, match="Invalid ellipsoid string"): Ellipsoid.from_string(invalid_str) def test_prime_meridian_equals(): pm = PrimeMeridian.from_epsg(8901) assert pm == 8901 assert not pm != 8901 assert pm != "invalid" @pytest.mark.parametrize("input_str", ["urn:ogc:def:meridian:EPSG::8901", "Greenwich"]) def test_prime_meridian__from_string(input_str): pm = 
PrimeMeridian.from_string(input_str) assert pm.name == "Greenwich" @pytest.mark.parametrize("invalid_str", ["3-598y5-98y", "urn:ogc:def:datum:EPSG::6326"]) def test_prime_meridian__from_string__invalid(invalid_str): with pytest.raises(CRSError, match="Invalid prime meridian string"): PrimeMeridian.from_string(invalid_str) def test_prime_meridian__from_name(): pm = PrimeMeridian.from_name("Greenwich") assert pm.name == "Greenwich" @pytest.mark.parametrize("invalid_str", ["3-598y5-98y", "urn:ogc:def:datum:EPSG::6326"]) def test_prime_meridian__from_name__invalid(invalid_str): with pytest.raises(CRSError, match="Invalid prime meridian name"): PrimeMeridian.from_name(invalid_str) def test_coordinate_operation_equals(): co = CoordinateOperation.from_epsg(1671) assert co == 1671 assert not co != 1671 assert co != "invalid" @pytest.mark.parametrize( "input_str", ["urn:ogc:def:coordinateOperation:EPSG::1671", "RGF93 v1 to WGS 84 (1)"], ) def test_coordinate_operation__from_string(input_str): co = CoordinateOperation.from_string(input_str) assert co.name == "RGF93 v1 to WGS 84 (1)" def test_coordinate_operation__from_name(): co = CoordinateOperation.from_name("UTM zone 12N") assert co.name == "UTM zone 12N" def test_coordinate_operation__from_name_auth_type(): co = CoordinateOperation.from_name( "ITRF_2000_To_WGS_1984", auth_name="ESRI", coordinate_operation_type=CoordinateOperationType.TRANSFORMATION, ) assert co.name == "ITRF_2000_To_WGS_1984" @pytest.mark.parametrize("invalid_str", ["3-598y5-98y", "urn:ogc:def:datum:EPSG::6326"]) def test_coordinate_operation__from_name__invalid(invalid_str): with pytest.raises(CRSError, match="Invalid coordinate operation name"): CoordinateOperation.from_name(invalid_str) @pytest.mark.parametrize("invalid_str", ["3-598y5-98y", "urn:ogc:def:datum:EPSG::6326"]) def test_coordinate_operation__from_string__invalid(invalid_str): with pytest.raises(CRSError, match="Invalid coordinate operation string"): CoordinateOperation.from_string(invalid_str) _COORDINATE_SYSTEM_STR = ( '{"$schema":"https://proj.org/schemas/v0.2/projjson.schema.json",' '"type":"CoordinateSystem","subtype":"ellipsoidal",' '"axis":[{"name":"Geodetic latitude","abbreviation":"Lat",' '"direction":"north","unit":"degree"},' '{"name":"Geodetic longitude","abbreviation":"Lon",' '"direction":"east","unit":"degree"}],' '"id":{"authority":"EPSG","code":6422}}' ) def test_coordinate_system__equals(): cs = CoordinateSystem.from_string(_COORDINATE_SYSTEM_STR) assert cs == _COORDINATE_SYSTEM_STR assert not cs != _COORDINATE_SYSTEM_STR assert cs != "invalid" def test_coordinate_system__from_string(): cs = CoordinateSystem.from_string(_COORDINATE_SYSTEM_STR) assert cs.name == "ellipsoidal" @pytest.mark.parametrize( "invalid_cs_string", ["3-598y5-98y", "urn:ogc:def:datum:EPSG::6326"] ) def test_coordinate_system__from_string__invalid(invalid_cs_string): with pytest.raises(CRSError, match="Invalid coordinate system string"): CoordinateSystem.from_string(invalid_cs_string) def test_to_proj4_enum__invalid(): crs = CRS.from_epsg(4326) with pytest.raises(ValueError, match="Invalid value"), pytest.warns(UserWarning): crs.to_proj4(1) def test_geodetic_crs(): cc = CRS("epsg:3004") assert cc.geodetic_crs.to_epsg() == 4265 def test_itrf_init(): crs = CRS("ITRF2000") assert crs.name == "ITRF2000" def test_compound_crs_init(): crs = CRS("EPSG:2393+5717") assert crs.name == "KKJ / Finland Uniform Coordinate System + N60 height" def test_compound_crs_urn_init(): crs = CRS("urn:ogc:def:crs,crs:EPSG::2393,crs:EPSG::5717") assert 
crs.name == "KKJ / Finland Uniform Coordinate System + N60 height" def test_from_authority__ignf(): cc = CRS.from_authority("IGNF", "ETRS89UTM28") assert cc.to_authority() == ("IGNF", "ETRS89UTM28") assert cc.to_authority("EPSG") == ("EPSG", "25828") assert cc.to_epsg() == 25828 def test_ignf_authority_repr(): assert repr(CRS.from_authority("IGNF", "ETRS89UTM28")).startswith( "" ) def test_crs_hash(): """hashes of equivalent CRS are equal""" assert hash(CRS.from_epsg(3857)) == hash(CRS.from_epsg(3857)) def test_crs_hash_unequal(): """hashes of non-equivalent CRS are not equal""" assert hash(CRS.from_epsg(3857)) != hash(CRS.from_epsg(4326)) def test_crs_init_user_input(): assert CRS(("IGNF", "ETRS89UTM28")).to_authority() == ("IGNF", "ETRS89UTM28") assert CRS(4326).to_epsg() == 4326 proj4_dict = {"proj": "longlat", "datum": "WGS84", "no_defs": None, "type": "crs"} with pytest.warns(UserWarning): assert CRS({"proj": "lonlat", "datum": "WGS84"}).to_dict() == proj4_dict assert CRS(proj="lonlat", datum="WGS84").to_dict() == proj4_dict assert ( CRS('{"proj": "longlat", "ellps": "WGS84", "datum": "WGS84"}').to_dict() == proj4_dict ) assert CRS(CRS(4326)).is_exact_same(CRS(CustomCRS())) def test_crs_is_exact_same__non_crs_input(): assert CRS(4326).is_exact_same("epsg:4326") with pytest.warns(FutureWarning): assert not CRS(4326).is_exact_same("+init=epsg:4326") def test_derived_projected_crs(): wkt = ( 'DERIVEDPROJCRS["derived projectedCRS",\n' ' BASEPROJCRS["WGS 84 / UTM zone 31N",\n' ' BASEGEOGCRS["WGS 84",\n' ' DATUM["World Geodetic System 1984",\n' ' ELLIPSOID["WGS 84",6378137,298.257223563,\n' ' LENGTHUNIT["metre",1]]],\n' ' PRIMEM["Greenwich",0,\n' ' ANGLEUNIT["degree",0.0174532925199433]]],\n' ' CONVERSION["UTM zone 31N",\n' ' METHOD["Transverse Mercator",\n' ' ID["EPSG",9807]],\n' ' PARAMETER["Latitude of natural origin",0,\n' ' ANGLEUNIT["degree",0.0174532925199433],\n' ' ID["EPSG",8801]],\n' ' PARAMETER["Longitude of natural origin",3,\n' ' ANGLEUNIT["degree",0.0174532925199433],\n' ' ID["EPSG",8802]],\n' ' PARAMETER["Scale factor at natural origin",0.9996,\n' ' SCALEUNIT["unity",1],\n' ' ID["EPSG",8805]],\n' ' PARAMETER["False easting",500000,\n' ' LENGTHUNIT["metre",1],\n' ' ID["EPSG",8806]],\n' ' PARAMETER["False northing",0,\n' ' LENGTHUNIT["metre",1],\n' ' ID["EPSG",8807]]]],\n' ' DERIVINGCONVERSION["unnamed",\n' ' METHOD["PROJ unimplemented"],\n' ' PARAMETER["foo",1.0,UNIT["metre",1]]],\n' " CS[Cartesian,2],\n" ' AXIS["(E)",east,\n' " ORDER[1],\n" ' LENGTHUNIT["metre",1,\n' ' ID["EPSG",9001]]],\n' ' AXIS["(N)",north,\n' " ORDER[2],\n" ' LENGTHUNIT["metre",1,\n' ' ID["EPSG",9001]]]]' ) crs = CRS(wkt) assert crs.is_derived assert crs.type_name == "Derived Projected CRS" def test_to_string__no_auth(): proj = CRS("+proj=latlong +ellps=GRS80 +towgs84=-199.87,74.79,246.62") assert ( proj.to_string() == "+proj=latlong +ellps=GRS80 +towgs84=-199.87,74.79,246.62 +type=crs" ) def test_to_string__auth(): assert CRS(("IGNF", "ETRS89UTM28")).to_string() == "IGNF:ETRS89UTM28" def test_srs__no_plus(): assert ( CRS("proj=longlat datum=WGS84 no_defs").srs == "proj=longlat datum=WGS84 no_defs type=crs" ) def test_equals_different_type(): assert CRS("epsg:4326") != "" assert not CRS("epsg:4326") == "" assert CRS("epsg:4326") != 27700 assert not CRS("epsg:4326") == 27700 assert not CRS("epsg:4326") != 4326 assert CRS("epsg:4326") == 4326 def test_is_exact_same_different_type(): assert not CRS("epsg:4326").is_exact_same(None) def test_compare_crs_non_crs(): assert CRS.from_epsg(4326) != 4.2 assert 
CRS.from_epsg(4326) == 4326 with pytest.warns(FutureWarning): assert CRS.from_dict({"init": "epsg:4326"}) == {"init": "epsg:4326"} assert CRS.from_dict({"init": "epsg:4326"}) != "epsg:4326" assert CRS("epsg:4326") == CustomCRS() def test_is_geocentric__bound(): with pytest.warns(FutureWarning): ccs = CRS("+init=epsg:4328 +towgs84=0,0,0") assert ccs.is_geocentric def test_is_geocentric(): ccs = CRS.from_epsg(4328) assert ccs.is_geocentric def test_is_vertical(): cc = CRS.from_epsg(5717) assert cc.is_vertical def test_is_engineering(): eng_wkt = ( 'ENGCRS["A construction site CRS",\n' 'EDATUM["P1",ANCHOR["Peg in south corner"]],\n' 'CS[Cartesian,2],\nAXIS["site east",southWest,ORDER[1]],\n' 'AXIS["site north",southEast,ORDER[2]],\n' 'LENGTHUNIT["metre",1.0],\n' 'TIMEEXTENT["date/time t1","date/time t2"]]' ) assert CRS(eng_wkt).is_engineering def test_source_crs__bound(): with pytest.warns(FutureWarning): assert CRS("+init=epsg:4328 +towgs84=0,0,0").source_crs.name == "unknown" def test_source_crs__missing(): assert CRS("epsg:4326").source_crs is None def test_target_crs__bound(): with pytest.warns(FutureWarning): assert CRS("+init=epsg:4328 +towgs84=0,0,0").target_crs.name == "WGS 84" def test_target_crs__missing(): assert CRS("epsg:4326").target_crs is None def test_whitepace_between_equals(): crs = CRS( "+proj =lcc +lat_1= 30.0 +lat_2= 35.0 +lat_0=30.0 +lon_0=87.0 +x_0=0 +y_0=0" ) assert crs.srs == ( "+proj=lcc +lat_1=30.0 +lat_2=35.0 +lat_0=30.0 " "+lon_0=87.0 +x_0=0 +y_0=0 +type=crs" ) def test_to_dict_no_proj4(): crs = CRS( { "a": 6371229.0, "b": 6371229.0, "lon_0": -10.0, "o_lat_p": 30.0, "o_lon_p": 0.0, "o_proj": "longlat", "proj": "ob_tran", } ) with pytest.warns(UserWarning): assert crs.to_dict() == { "R": 6371229, "lon_0": -10, "no_defs": None, "o_lat_p": 30, "o_lon_p": 0, "o_proj": "longlat", "proj": "ob_tran", "type": "crs", } def test_to_dict_from_dict(): cc = CRS.from_epsg(4326) with pytest.warns(UserWarning): assert CRS.from_dict(cc.to_dict()).name == "unknown" def test_from_dict__invalid(): with pytest.raises(CRSError, match="CRS input is not a dict"): CRS.from_dict(4326) @pytest.mark.parametrize( "class_type", [Datum, Ellipsoid, PrimeMeridian, CoordinateOperation, CoordinateSystem], ) def test_incorrectly_initialized(class_type): with pytest.raises(RuntimeError): class_type() def test_scope__remarks(): co = CoordinateOperation.from_epsg("8048") assert "GDA94" in co.scope assert "Scale difference" in co.remarks def test_crs__scope__remarks__missing(): cc = CRS("+proj=latlon") assert cc.scope is None assert cc.remarks is None def test_operations_missing(): cc = CRS(("IGNF", "ETRS89UTM28")) assert cc.coordinate_operation.operations == () def test_operations(): transformer = TransformerGroup(28356, 7856).transformers[0] coord_op = CoordinateOperation.from_string(transformer.to_wkt()) assert coord_op.operations == transformer.operations def test_operations__scope_remarks(): operation = TransformerGroup(28356, 7856).transformers[0].operations[1] coord_op = CoordinateOperation.from_string(operation.to_wkt()) assert coord_op == operation assert coord_op.remarks == operation.remarks assert coord_op.scope == operation.scope def test_crs_equals(): assert CRS(4326).equals("epsg:4326") def test_crs_equals__ignore_axis_order(): with pytest.warns(FutureWarning): assert CRS("epsg:4326").equals("+init=epsg:4326", ignore_axis_order=True) @pytest.mark.parametrize( "crs_input", [ "+proj=utm +zone=15", 26915, "+proj=utm +zone=15 +towgs84=0,0,0", "EPSG:26915+5717", ], ) def 
test_utm_zone(crs_input): assert CRS(crs_input).utm_zone == "15N" @pytest.mark.parametrize("crs_input", ["+proj=tmerc", "epsg:4326"]) def test_utm_zone__none(crs_input): assert CRS(crs_input).utm_zone is None def test_numpy_bool_kwarg_false(): # Issue 564 south = numpy.array(50) < 0 crs = CRS( proj="utm", zone=32, ellipsis="WGS84", datum="WGS84", units="m", south=south ) assert "south" not in crs.srs def test_numpy_bool_kwarg_true(): # Issue 564 south = numpy.array(50) > 0 crs = CRS( proj="utm", zone=32, ellipsis="WGS84", datum="WGS84", units="m", south=south ) assert "+south " in crs.srs def test_crs_multithread(): # https://github.com/pyproj4/pyproj/issues/782 crs = CRS(4326) def to_wkt(num): return crs.to_wkt() with concurrent.futures.ThreadPoolExecutor(max_workers=2) as executor: for result in executor.map(to_wkt, range(10)): pass @pytest.mark.skipif( platform.python_implementation() != "CPython", reason="pypy process unstable." ) def test_crs_multiprocess(): # https://github.com/pyproj4/pyproj/issues/933 with concurrent.futures.ProcessPoolExecutor(max_workers=2) as executor: for result in executor.map(CRS, [4326 for _ in range(10)]): pass def test_coordinate_operation__to_proj4(): operation = CoordinateOperation.from_string( "+proj=pipeline +step +proj=axisswap +order=2,1 +step " "+proj=unitconvert +xy_in=deg +xy_out=rad +step " "+proj=webmerc +lat_0=0 +lon_0=0 +x_0=0 +y_0=0 +ellps=WGS84" ) proj_string = operation.to_proj4() assert "+proj=pipeline" in proj_string assert "\n" not in proj_string def test_coordinate_operation__to_proj4__pretty(): operation = CoordinateOperation.from_string( "+proj=pipeline +step +proj=axisswap +order=2,1 +step " "+proj=unitconvert +xy_in=deg +xy_out=rad +step " "+proj=webmerc +lat_0=0 +lon_0=0 +x_0=0 +y_0=0 +ellps=WGS84" ) proj_string = operation.to_proj4(pretty=True) assert "+proj=pipeline" in proj_string assert "\n" in proj_string @pytest.mark.parametrize( "crs_input", [ "EPSG:4326", "EPSG:2056", ], ) def test_to_3d(crs_input): crs = CRS(crs_input) assert len(crs.axis_info) == 2 crs_3d = crs.to_3d() assert len(crs_3d.axis_info) == 3 vert_axis = crs_3d.axis_info[-1] assert vert_axis.name == "Ellipsoidal height" assert vert_axis.unit_name == "metre" assert vert_axis.direction == "up" assert crs_3d.to_3d() == crs_3d assert crs_3d.name == crs.name def test_to_3d__name(): crs_3d = CRS("EPSG:2056").to_3d(name="TEST") assert crs_3d.name == "TEST" @pytest.mark.parametrize( "crs_input", [ CRS("EPSG:4979"), # native 3D CRS("EPSG:2056").to_3d(), # a 2D CRS converted to 3D CRS("EPSG:4326+5773"), # a 3D CRS based on a compound ], ) def test_to_2d(crs_input): assert len(crs_input.axis_info) == 3 horizon_axis_crs_3d = crs_input.axis_info[:-1] crs_2d = crs_input.to_2d() horizon_axis_crs_2d = crs_input.axis_info assert len(crs_2d.axis_info) == 2 assert horizon_axis_crs_2d[0] == horizon_axis_crs_3d[0] assert horizon_axis_crs_2d[1] == horizon_axis_crs_3d[1] assert crs_2d.to_2d() == crs_2d # For CompoundCRS, the 3D name is initialized different from 2D if crs_input.name == "WGS 84 + EGM96 height": assert crs_2d.name == "WGS 84" # Otherwise, no change else: assert crs_2d.name == crs_input.name def test_to_2d__name(): crs_2d = CRS("EPSG:2056").to_3d().to_2d(name="TEST") assert crs_2d.name == "TEST" def test_crs__pickle(tmp_path): assert_can_pickle(CRS("epsg:4326"), tmp_path) def test_is_derived(): assert CRS( "+proj=ob_tran +o_proj=longlat +o_lat_p=0 +o_lon_p=0 +lon_0=0" ).is_derived assert not CRS("+proj=latlon").is_derived def test_inheritance__from_methods(): class 
ChildCRS(CRS): def new_method(self): return 1 def assert_inheritance_valid(new_crs): assert new_crs.new_method() == 1 assert isinstance(new_crs, ChildCRS) assert isinstance(new_crs.geodetic_crs, ChildCRS) assert isinstance(new_crs.source_crs, (type(None), ChildCRS)) assert isinstance(new_crs.target_crs, (type(None), ChildCRS)) assert isinstance(new_crs.to_3d(), ChildCRS) for sub_crs in new_crs.sub_crs_list: assert isinstance(sub_crs, ChildCRS) assert_inheritance_valid(ChildCRS.from_epsg(4326)) assert_inheritance_valid(ChildCRS.from_string("EPSG:2056")) with pytest.warns(FutureWarning): assert_inheritance_valid(ChildCRS.from_proj4("+init=epsg:4328 +towgs84=0,0,0")) assert_inheritance_valid(ChildCRS.from_user_input("EPSG:4326+5773")) assert_inheritance_valid(ChildCRS.from_json(CRS(4326).to_json())) assert_inheritance_valid(ChildCRS.from_json_dict(CRS(4326).to_json_dict())) assert_inheritance_valid(ChildCRS.from_wkt(CRS(4326).to_wkt())) def test_list_authority(): assert CRS("+proj=utm +zone=15").list_authority() == [ AuthorityMatchInfo(auth_name="EPSG", code="32615", confidence=70) ] def test_list_authority__multiple(): auth_list = CRS("+proj=longlat").list_authority() assert AuthorityMatchInfo(auth_name="OGC", code="CRS84", confidence=70) in auth_list assert AuthorityMatchInfo(auth_name="EPSG", code="4326", confidence=70) in auth_list pyproj-3.7.1/test/crs/test_crs_cf.py000066400000000000000000002066371475425760300175160ustar00rootroot00000000000000import numpy import pytest from numpy.testing import assert_almost_equal from packaging import version from pyproj import CRS from pyproj.crs import ProjectedCRS from pyproj.crs._cf1x8 import _try_list_if_string from pyproj.crs.coordinate_operation import ( LambertAzimuthalEqualAreaConversion, LambertCylindricalEqualAreaConversion, MercatorAConversion, OrthographicConversion, PolarStereographicAConversion, PolarStereographicBConversion, SinusoidalConversion, StereographicConversion, VerticalPerspectiveConversion, ) from pyproj.exceptions import CRSError from test.conftest import PROJ_LOOSE_VERSION def _to_dict(operation): param_dict = {} for param in operation.params: param_dict[param.name] = param.value return param_dict def _test_roundtrip(expected_cf, wkt_startswith): crs = CRS.from_cf(expected_cf) cf_dict = crs.to_cf() assert cf_dict.pop("crs_wkt").startswith(wkt_startswith) assert_almost_equal( cf_dict.pop("semi_minor_axis"), expected_cf.pop("semi_minor_axis") ) assert_almost_equal( cf_dict.pop("inverse_flattening"), expected_cf.pop("inverse_flattening") ) if "towgs84" in expected_cf: assert_almost_equal(cf_dict.pop("towgs84"), expected_cf.pop("towgs84")) assert cf_dict == expected_cf def test_cf_from_numpy_dtypes(): cf = { "grid_mapping_name": "lambert_conformal_conic", "standard_parallel": numpy.array([60, 30], dtype="f4"), "longitude_of_central_meridian": numpy.float32(0), "latitude_of_projection_origin": numpy.int32(45), } crs = CRS.from_cf(cf) with pytest.warns(UserWarning): assert crs.to_dict() == { "datum": "WGS84", "lat_0": 45, "lat_1": 60, "lat_2": 30, "lon_0": 0, "no_defs": None, "proj": "lcc", "type": "crs", "units": "m", "x_0": 0, "y_0": 0, } def test_to_cf_transverse_mercator(): crs = CRS( proj="tmerc", lat_0=0, lon_0=15, k=0.9996, x_0=2520000, y_0=0, ellps="intl", units="m", towgs84="-122.74,-34.27,-22.83,-1.884,-3.400,-3.030,-15.62", ) towgs84_test = [-122.74, -34.27, -22.83, -1.884, -3.4, -3.03, -15.62] horizontal_datum_name = ( "Unknown based on International 1924 (Hayford 1909, 1910) ellipsoid using " 
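# ---------------------------------------------------------------------------
# Illustrative sketch (editor's addition, not an upstream test; named without
# the ``test_`` prefix so pytest would not collect it): the ``_test_roundtrip``
# helper above captures the CF round trip used throughout this module -- build
# a CRS from a CF grid-mapping dict with ``CRS.from_cf`` and export it back
# with ``to_cf``.  The parameter values below are arbitrary illustrations, not
# taken from any particular test; with no datum attributes given, the tests in
# this module show pyproj falling back to WGS 84.
def _example_cf_roundtrip_sketch():
    from pyproj import CRS

    cf_attrs = {
        "grid_mapping_name": "transverse_mercator",
        "latitude_of_projection_origin": 0.0,
        "longitude_of_central_meridian": 15.0,
        "false_easting": 0.0,
        "false_northing": 0.0,
    }
    crs = CRS.from_cf(cf_attrs)
    exported = crs.to_cf()
    # the grid mapping survives the round trip; "crs_wkt" carries the full CRS
    assert exported["grid_mapping_name"] == "transverse_mercator"
    assert "crs_wkt" in exported
# ---------------------------------------------------------------------------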
"towgs84=-122.74,-34.27,-22.83,-1.884,-3.400,-3.030,-15.62" ) expected_cf = { "semi_major_axis": 6378388.0, "semi_minor_axis": crs.ellipsoid.semi_minor_metre, "inverse_flattening": 297.0, "reference_ellipsoid_name": "International 1924 (Hayford 1909, 1910)", "longitude_of_prime_meridian": 0.0, "prime_meridian_name": "Greenwich", "horizontal_datum_name": horizontal_datum_name, "towgs84": towgs84_test, "grid_mapping_name": "transverse_mercator", "latitude_of_projection_origin": 0.0, "longitude_of_central_meridian": 15.0, "false_easting": 2520000.0, "false_northing": 0.0, "scale_factor_at_central_meridian": 0.9996, "geographic_crs_name": "unknown", "projected_crs_name": "unknown", } cf_dict = crs.to_cf() assert cf_dict.pop("crs_wkt").startswith("BOUNDCRS[") assert cf_dict == expected_cf # test roundtrip _test_roundtrip(expected_cf, "BOUNDCRS[") # test coordinate system assert crs.cs_to_cf() == [ { "axis": "X", "long_name": "Easting", "standard_name": "projection_x_coordinate", "units": "metre", }, { "axis": "Y", "long_name": "Northing", "standard_name": "projection_y_coordinate", "units": "metre", }, ] with pytest.warns(UserWarning): assert crs.to_dict() == { "proj": "tmerc", "lat_0": 0, "lon_0": 15, "k": 0.9996, "x_0": 2520000, "y_0": 0, "ellps": "intl", "towgs84": towgs84_test, "units": "m", "no_defs": None, "type": "crs", } @pytest.mark.parametrize( "towgs84_test", [ (-122.74, -34.27, -22.83, -1.884, -3.4, -3.03, -15.62), "-122.74, -34.27, -22.83, -1.884, -3.4, -3.03, -15.62", ], ) def test_from_cf_transverse_mercator(towgs84_test): crs = CRS.from_cf( { "grid_mapping_name": "transverse_mercator", "latitude_of_projection_origin": 0, "longitude_of_central_meridian": 15, "false_easting": 2520000, "false_northing": 0, "reference_ellipsoid_name": "intl", "towgs84": towgs84_test, } ) expected_cf = { "semi_major_axis": 6378388.0, "semi_minor_axis": crs.ellipsoid.semi_minor_metre, "inverse_flattening": 297.0, "reference_ellipsoid_name": "International 1924 (Hayford 1909, 1910)", "longitude_of_prime_meridian": 0.0, "prime_meridian_name": "Greenwich", "grid_mapping_name": "transverse_mercator", "latitude_of_projection_origin": 0.0, "longitude_of_central_meridian": 15.0, "false_easting": 2520000.0, "false_northing": 0.0, "scale_factor_at_central_meridian": 1.0, "geographic_crs_name": "undefined", "projected_crs_name": "undefined", "horizontal_datum_name": "undefined", } cf_dict = crs.to_cf() assert cf_dict.pop("crs_wkt").startswith("BOUNDCRS[") assert_almost_equal(cf_dict.pop("towgs84"), _try_list_if_string(towgs84_test)) assert cf_dict == expected_cf # test roundtrip expected_cf["towgs84"] = _try_list_if_string(towgs84_test) _test_roundtrip(expected_cf, "BOUNDCRS[") # test coordinate system assert crs.cs_to_cf() == [ { "axis": "X", "long_name": "Easting", "standard_name": "projection_x_coordinate", "units": "metre", }, { "axis": "Y", "long_name": "Northing", "standard_name": "projection_y_coordinate", "units": "metre", }, ] def test_cf_from_latlon(): crs = CRS.from_cf( dict( grid_mapping_name="latitude_longitude", semi_major_axis=6378137.0, inverse_flattening=298.257223, ) ) expected_cf = { "semi_major_axis": 6378137.0, "semi_minor_axis": crs.ellipsoid.semi_minor_metre, "inverse_flattening": crs.ellipsoid.inverse_flattening, "longitude_of_prime_meridian": 0.0, "prime_meridian_name": "Greenwich", "grid_mapping_name": "latitude_longitude", "geographic_crs_name": "undefined", "reference_ellipsoid_name": "undefined", "horizontal_datum_name": "undefined", } cf_dict = crs.to_cf() assert 
cf_dict.pop("crs_wkt").startswith("GEOGCRS[") assert cf_dict == expected_cf # test roundtrip _test_roundtrip(expected_cf, "GEOGCRS[") # test coordinate system assert crs.cs_to_cf() == [ { "standard_name": "longitude", "long_name": "longitude coordinate", "units": "degrees_east", "axis": "X", }, { "standard_name": "latitude", "long_name": "latitude coordinate", "units": "degrees_north", "axis": "Y", }, ] def test_cf_from_latlon__named(): crs = CRS.from_cf(dict(spatial_ref="epsg:4326")) expected_cf = { "semi_major_axis": 6378137.0, "semi_minor_axis": crs.ellipsoid.semi_minor_metre, "inverse_flattening": crs.ellipsoid.inverse_flattening, "reference_ellipsoid_name": "WGS 84", "longitude_of_prime_meridian": 0.0, "prime_meridian_name": "Greenwich", "horizontal_datum_name": "World Geodetic System 1984 ensemble", "geographic_crs_name": "WGS 84", "grid_mapping_name": "latitude_longitude", } cf_dict = crs.to_cf() assert cf_dict.pop("crs_wkt").startswith("GEOGCRS[") assert cf_dict == expected_cf # test roundtrip _test_roundtrip(expected_cf, "GEOGCRS[") def test_cf_from_utm(): crs = CRS.from_cf(dict(crs_wkt="epsg:32615")) expected_cf = { "semi_major_axis": 6378137.0, "semi_minor_axis": crs.ellipsoid.semi_minor_metre, "inverse_flattening": crs.ellipsoid.inverse_flattening, "reference_ellipsoid_name": "WGS 84", "longitude_of_prime_meridian": 0.0, "prime_meridian_name": "Greenwich", "geographic_crs_name": "WGS 84", "horizontal_datum_name": "World Geodetic System 1984 ensemble", "projected_crs_name": "WGS 84 / UTM zone 15N", "grid_mapping_name": "transverse_mercator", "latitude_of_projection_origin": 0.0, "longitude_of_central_meridian": -93.0, "false_easting": 500000.0, "false_northing": 0.0, "scale_factor_at_central_meridian": 0.9996, } cf_dict = crs.to_cf() assert cf_dict.pop("crs_wkt").startswith("PROJCRS[") assert cf_dict == expected_cf # test roundtrip _test_roundtrip(expected_cf, "PROJCRS[") # test coordinate system assert crs.cs_to_cf() == [ { "axis": "X", "long_name": "Easting", "standard_name": "projection_x_coordinate", "units": "metre", }, { "axis": "Y", "long_name": "Northing", "standard_name": "projection_y_coordinate", "units": "metre", }, ] def test_cf_from_utm__nad83(): crs = CRS("epsg:26917") expected_cf = { "semi_major_axis": 6378137.0, "semi_minor_axis": crs.ellipsoid.semi_minor_metre, "inverse_flattening": crs.ellipsoid.inverse_flattening, "reference_ellipsoid_name": "GRS 1980", "longitude_of_prime_meridian": 0.0, "prime_meridian_name": "Greenwich", "geographic_crs_name": "NAD83", "horizontal_datum_name": "North American Datum 1983", "projected_crs_name": "NAD83 / UTM zone 17N", "grid_mapping_name": "transverse_mercator", "latitude_of_projection_origin": 0.0, "longitude_of_central_meridian": -81.0, "false_easting": 500000.0, "false_northing": 0.0, "scale_factor_at_central_meridian": 0.9996, } cf_dict = crs.to_cf() assert cf_dict.pop("crs_wkt").startswith("PROJCRS[") assert cf_dict == expected_cf # test roundtrip _test_roundtrip(expected_cf, "PROJCRS[") # test coordinate system assert crs.cs_to_cf() == [ { "axis": "X", "long_name": "Easting", "standard_name": "projection_x_coordinate", "units": "metre", }, { "axis": "Y", "long_name": "Northing", "standard_name": "projection_y_coordinate", "units": "metre", }, ] def test_cf_rotated_latlon(): crs = CRS.from_cf( dict( grid_mapping_name="rotated_latitude_longitude", grid_north_pole_latitude=32.5, grid_north_pole_longitude=170.0, ) ) expected_cf = { "semi_major_axis": 6378137.0, "semi_minor_axis": crs.ellipsoid.semi_minor_metre, 
"inverse_flattening": crs.ellipsoid.inverse_flattening, "reference_ellipsoid_name": "WGS 84", "longitude_of_prime_meridian": 0.0, "prime_meridian_name": "Greenwich", "horizontal_datum_name": "World Geodetic System 1984 ensemble", "grid_mapping_name": "rotated_latitude_longitude", "grid_north_pole_latitude": 32.5, "grid_north_pole_longitude": 170.0, "north_pole_grid_longitude": 0.0, "geographic_crs_name": "undefined", } cf_dict = crs.to_cf() assert cf_dict.pop("crs_wkt").startswith("GEOGCRS[") assert cf_dict == expected_cf # test roundtrip _test_roundtrip(expected_cf, "GEOGCRS[") # test coordinate system assert crs.cs_to_cf() == [ { "standard_name": "grid_longitude", "long_name": "longitude in rotated pole grid", "units": "degrees", "axis": "X", }, { "standard_name": "grid_latitude", "long_name": "latitude in rotated pole grid", "units": "degrees", "axis": "Y", }, ] with pytest.warns(UserWarning): proj_dict = crs.to_dict() assert proj_dict == { "proj": "ob_tran", "o_proj": "longlat", "o_lat_p": 32.5, "o_lon_p": 0, "lon_0": 350, "datum": "WGS84", "no_defs": None, "type": "crs", } def test_cf_rotated_latlon__grid(): crs = CRS.from_cf( dict( grid_mapping_name="rotated_latitude_longitude", grid_north_pole_latitude=32.5, grid_north_pole_longitude=1.0, north_pole_grid_longitude=170.0, ) ) with pytest.warns(UserWarning): proj_dict = crs.to_dict() assert proj_dict == { "proj": "ob_tran", "o_proj": "longlat", "o_lat_p": 32.5, "o_lon_p": 170.0, "lon_0": 181.0, "datum": "WGS84", "no_defs": None, "type": "crs", } def test_rotated_pole_to_cf(): rotated_pole_wkt = ( 'GEOGCRS["undefined",\n' ' BASEGEOGCRS["Unknown datum based upon the GRS 1980 ellipsoid",\n' ' DATUM["Not specified (based on GRS 1980 ellipsoid)",\n' ' ELLIPSOID["GRS 1980",6378137,298.257222101,\n' ' LENGTHUNIT["metre",1]]],\n' ' PRIMEM["Greenwich",0,\n' ' ANGLEUNIT["degree",0.0174532925199433]]],\n' ' DERIVINGCONVERSION["Pole rotation (netCDF CF convention)",\n' ' METHOD["Pole rotation (netCDF CF convention)"],\n' ' PARAMETER["Grid north pole latitude (netCDF CF ' 'convention)",2,\n' ' ANGLEUNIT["degree",0.0174532925199433,\n' ' ID["EPSG",9122]]],\n' ' PARAMETER["Grid north pole longitude (netCDF CF ' 'convention)",3,\n' ' ANGLEUNIT["degree",0.0174532925199433,\n' ' ID["EPSG",9122]]],\n' ' PARAMETER["North pole grid longitude (netCDF CF ' 'convention)",4,\n' ' ANGLEUNIT["degree",0.0174532925199433,\n' ' ID["EPSG",9122]]]],\n' " CS[ellipsoidal,2],\n" ' AXIS["geodetic latitude (Lat)",north,\n' " ORDER[1],\n" ' ANGLEUNIT["degree",0.0174532925199433,\n' ' ID["EPSG",9122]]],\n' ' AXIS["geodetic longitude (Lon)",east,\n' " ORDER[2],\n" ' ANGLEUNIT["degree",0.0174532925199433,\n' ' ID["EPSG",9122]]]]' ) crs = CRS(rotated_pole_wkt) expected_cf = { "semi_major_axis": 6378137.0, "semi_minor_axis": 6356752.314140356, "inverse_flattening": 298.257222101, "reference_ellipsoid_name": "GRS 1980", "longitude_of_prime_meridian": 0.0, "prime_meridian_name": "Greenwich", "geographic_crs_name": "undefined", "grid_mapping_name": "rotated_latitude_longitude", "grid_north_pole_latitude": 2.0, "grid_north_pole_longitude": 3.0, "north_pole_grid_longitude": 4.0, "horizontal_datum_name": "Not specified (based on GRS 1980 ellipsoid)", } cf_dict = crs.to_cf() assert cf_dict.pop("crs_wkt").startswith("GEOGCRS[") assert cf_dict == expected_cf # test roundtrip _test_roundtrip(expected_cf, "GEOGCRS[") def test_grib_rotated_pole_to_cf(): rotated_pole_wkt = """GEOGCRS["Coordinate System imported from GRIB file", BASEGEOGCRS["Coordinate System imported from GRIB file", 
DATUM["unnamed", ELLIPSOID["Sphere",6371229,0, LENGTHUNIT["metre",1, ID["EPSG",9001]]]], PRIMEM["Greenwich",0, ANGLEUNIT["degree",0.0174532925199433, ID["EPSG",9122]]]], DERIVINGCONVERSION["Pole rotation (GRIB convention)", METHOD["Pole rotation (GRIB convention)"], PARAMETER["Latitude of the southern pole (GRIB convention)",-33.443381, ANGLEUNIT["degree",0.0174532925199433, ID["EPSG",9122]]], PARAMETER["Longitude of the southern pole (GRIB convention)",-93.536426, ANGLEUNIT["degree",0.0174532925199433, ID["EPSG",9122]]], PARAMETER["Axis rotation (GRIB convention)",0, ANGLEUNIT["degree",0.0174532925199433, ID["EPSG",9122]]]], CS[ellipsoidal,2], AXIS["latitude",north, ORDER[1], ANGLEUNIT["degree",0.0174532925199433, ID["EPSG",9122]]], AXIS["longitude",east, ORDER[2], ANGLEUNIT["degree",0.0174532925199433, ID["EPSG",9122]]]]""" crs = CRS(rotated_pole_wkt) with pytest.warns(UserWarning): cf_dict = crs.to_cf(errcheck=True) assert cf_dict.pop("crs_wkt").startswith("GEOGCRS[") assert not cf_dict def test_cf_lambert_conformal_conic_1sp(): crs = CRS.from_cf( dict( grid_mapping_name="lambert_conformal_conic", standard_parallel=25.0, longitude_of_central_meridian=265.0, latitude_of_projection_origin=25.0, ) ) expected_cf = { "semi_major_axis": 6378137.0, "semi_minor_axis": crs.ellipsoid.semi_minor_metre, "inverse_flattening": crs.ellipsoid.inverse_flattening, "reference_ellipsoid_name": "WGS 84", "longitude_of_prime_meridian": 0.0, "prime_meridian_name": "Greenwich", "horizontal_datum_name": "World Geodetic System 1984 ensemble", "grid_mapping_name": "lambert_conformal_conic", "longitude_of_central_meridian": 265.0, "false_easting": 0.0, "false_northing": 0.0, "standard_parallel": 25.0, "geographic_crs_name": "undefined", "projected_crs_name": "undefined", } cf_dict = crs.to_cf() assert cf_dict.pop("crs_wkt").startswith("PROJCRS[") assert cf_dict == expected_cf # test roundtrip _test_roundtrip(expected_cf, "PROJCRS[") # test coordinate system assert crs.cs_to_cf() == [ { "axis": "X", "long_name": "Easting", "standard_name": "projection_x_coordinate", "units": "metre", }, { "axis": "Y", "long_name": "Northing", "standard_name": "projection_y_coordinate", "units": "metre", }, ] with pytest.warns(UserWarning): proj_dict = crs.to_dict() assert proj_dict == { "proj": "lcc", "lat_1": 25, "lat_0": 25, "lon_0": 265, "k_0": 1, "x_0": 0, "y_0": 0, "datum": "WGS84", "units": "m", "no_defs": None, "type": "crs", } @pytest.mark.parametrize("standard_parallel", [[25.0, 30.0], "25., 30."]) def test_cf_lambert_conformal_conic_2sp(standard_parallel): crs = CRS.from_cf( dict( grid_mapping_name="lambert_conformal_conic", standard_parallel=standard_parallel, longitude_of_central_meridian=265.0, latitude_of_projection_origin=25.0, ) ) expected_cf = { "semi_major_axis": 6378137.0, "semi_minor_axis": crs.ellipsoid.semi_minor_metre, "inverse_flattening": crs.ellipsoid.inverse_flattening, "reference_ellipsoid_name": "WGS 84", "longitude_of_prime_meridian": 0.0, "prime_meridian_name": "Greenwich", "horizontal_datum_name": "World Geodetic System 1984 ensemble", "grid_mapping_name": "lambert_conformal_conic", "standard_parallel": (25.0, 30.0), "latitude_of_projection_origin": 25.0, "longitude_of_central_meridian": 265.0, "false_easting": 0.0, "false_northing": 0.0, "geographic_crs_name": "undefined", "projected_crs_name": "undefined", } cf_dict = crs.to_cf() assert cf_dict.pop("crs_wkt").startswith("PROJCRS[") assert cf_dict == expected_cf # test roundtrip _test_roundtrip(expected_cf, "PROJCRS[") # test coordinate system 
assert crs.cs_to_cf() == [ { "axis": "X", "long_name": "Easting", "standard_name": "projection_x_coordinate", "units": "metre", }, { "axis": "Y", "long_name": "Northing", "standard_name": "projection_y_coordinate", "units": "metre", }, ] with pytest.warns(UserWarning): proj_dict = crs.to_dict() assert proj_dict == { "proj": "lcc", "lat_1": 25, "lat_2": 30, "lat_0": 25, "lon_0": 265, "x_0": 0, "y_0": 0, "datum": "WGS84", "units": "m", "no_defs": None, "type": "crs", } def test_oblique_mercator(): crs = CRS.from_cf( dict( grid_mapping_name="oblique_mercator", azimuth_of_central_line=0.35, latitude_of_projection_origin=10, longitude_of_projection_origin=15, reference_ellipsoid_name="WGS84", false_easting=0.0, false_northing=0.0, ) ) expected_cf = { "semi_major_axis": 6378137.0, "semi_minor_axis": crs.ellipsoid.semi_minor_metre, "inverse_flattening": crs.ellipsoid.inverse_flattening, "reference_ellipsoid_name": "WGS 84", "longitude_of_prime_meridian": 0.0, "prime_meridian_name": "Greenwich", "grid_mapping_name": "oblique_mercator", "latitude_of_projection_origin": 10.0, "longitude_of_projection_origin": 15.0, "azimuth_of_central_line": 0.35, "scale_factor_at_projection_origin": 1.0, "false_easting": 0.0, "false_northing": 0.0, "geographic_crs_name": "undefined", "projected_crs_name": "undefined", "horizontal_datum_name": "undefined", } cf_dict = crs.to_cf() assert cf_dict.pop("crs_wkt").startswith("PROJCRS[") assert cf_dict == expected_cf # test roundtrip _test_roundtrip(expected_cf, "PROJCRS[") # test coordinate system assert crs.cs_to_cf() == [ { "axis": "X", "long_name": "Easting", "standard_name": "projection_x_coordinate", "units": "metre", }, { "axis": "Y", "long_name": "Northing", "standard_name": "projection_y_coordinate", "units": "metre", }, ] with pytest.warns(UserWarning): assert crs.to_dict() == { "proj": "omerc", "lat_0": 10, "lonc": 15, "alpha": 0.35, "gamma": 0, "k": 1, "x_0": 0, "y_0": 0, "ellps": "WGS84", "units": "m", "no_defs": None, "type": "crs", } def test_oblique_mercator_losing_gamma(): crs = CRS( "+proj=omerc +lat_0=-36.10360962430914 +lonc=147.0632291727015 " "+alpha=-54.78622979612904 +k=1 +x_0=0 +y_0=0 +gamma=-54.78622979612904" ) with pytest.warns( UserWarning, match="angle from rectified to skew grid parameter lost in conversion to CF", ): crs.to_cf() def test_cf_from_invalid(): with pytest.raises(CRSError): CRS.from_cf( dict( longitude_of_central_meridian=265.0, latitude_of_projection_origin=25.0 ) ) with pytest.raises(CRSError): CRS.from_cf( dict(grid_mapping_name="invalid", latitude_of_projection_origin=25.0) ) def test_geos_crs_sweep(): crs = CRS.from_cf( dict( grid_mapping_name="geostationary", perspective_point_height=1, sweep_angle_axis="x", ) ) expected_cf = { "semi_major_axis": 6378137.0, "semi_minor_axis": crs.ellipsoid.semi_minor_metre, "inverse_flattening": crs.ellipsoid.inverse_flattening, "reference_ellipsoid_name": "WGS 84", "longitude_of_prime_meridian": 0.0, "prime_meridian_name": "Greenwich", "horizontal_datum_name": "World Geodetic System 1984 ensemble", "grid_mapping_name": "geostationary", "sweep_angle_axis": "x", "perspective_point_height": 1.0, "latitude_of_projection_origin": 0.0, "longitude_of_projection_origin": 0.0, "false_easting": 0.0, "false_northing": 0.0, "geographic_crs_name": "undefined", "projected_crs_name": "undefined", } cf_dict = crs.to_cf() assert cf_dict.pop("crs_wkt").startswith("PROJCRS[") assert cf_dict == expected_cf # test roundtrip _test_roundtrip(expected_cf, "PROJCRS[") # test coordinate system assert 
crs.cs_to_cf() == [ { "axis": "X", "long_name": "Easting", "standard_name": "projection_x_coordinate", "units": "metre", }, { "axis": "Y", "long_name": "Northing", "standard_name": "projection_y_coordinate", "units": "metre", }, ] def test_geos_crs_fixed_angle_axis(): crs = CRS.from_cf( dict( grid_mapping_name="geostationary", perspective_point_height=1, fixed_angle_axis="y", ), ) expected_cf = { "semi_major_axis": 6378137.0, "semi_minor_axis": crs.ellipsoid.semi_minor_metre, "inverse_flattening": crs.ellipsoid.inverse_flattening, "reference_ellipsoid_name": "WGS 84", "longitude_of_prime_meridian": 0.0, "prime_meridian_name": "Greenwich", "horizontal_datum_name": "World Geodetic System 1984 ensemble", "grid_mapping_name": "geostationary", "sweep_angle_axis": "x", "perspective_point_height": 1.0, "latitude_of_projection_origin": 0.0, "longitude_of_projection_origin": 0.0, "false_easting": 0.0, "false_northing": 0.0, "geographic_crs_name": "undefined", "projected_crs_name": "undefined", } cf_dict = crs.to_cf() assert cf_dict.pop("crs_wkt").startswith("PROJCRS[") assert cf_dict == expected_cf # test roundtrip _test_roundtrip(expected_cf, "PROJCRS[") # test coordinate system assert crs.cs_to_cf() == [ { "axis": "X", "long_name": "Easting", "standard_name": "projection_x_coordinate", "units": "metre", }, { "axis": "Y", "long_name": "Northing", "standard_name": "projection_y_coordinate", "units": "metre", }, ] def test_geos_proj_string(): crs = CRS({"proj": "geos", "h": 35785831.0, "a": 6378169.0, "b": 6356583.8}) expected_cf = { "semi_major_axis": 6378169.0, "semi_minor_axis": 6356583.8, "inverse_flattening": crs.ellipsoid.inverse_flattening, "longitude_of_prime_meridian": 0.0, "prime_meridian_name": "Greenwich", "grid_mapping_name": "geostationary", "sweep_angle_axis": "y", "perspective_point_height": 35785831.0, "latitude_of_projection_origin": 0.0, "longitude_of_projection_origin": 0.0, "false_easting": 0.0, "false_northing": 0.0, "geographic_crs_name": "unknown", "horizontal_datum_name": "unknown", "projected_crs_name": "unknown", "reference_ellipsoid_name": "unknown", } cf_dict = crs.to_cf() assert cf_dict.pop("crs_wkt").startswith("PROJCRS[") assert cf_dict == expected_cf # test roundtrip _test_roundtrip(expected_cf, "PROJCRS[") # test coordinate system assert crs.cs_to_cf() == [ { "axis": "X", "long_name": "Easting", "standard_name": "projection_x_coordinate", "units": "metre", }, { "axis": "Y", "long_name": "Northing", "standard_name": "projection_y_coordinate", "units": "metre", }, ] def test_ob_tran_not_rotated_latlon(): crs = CRS("+proj=ob_tran +o_proj=moll +o_lat_p=45 +o_lon_p=-90 +lon_0=-90") with pytest.warns(UserWarning): cf_dict = crs.to_cf(errcheck=True) assert cf_dict.pop("crs_wkt").startswith("PROJCRS[") assert cf_dict == {} def test_mercator_b(): crs = CRS.from_cf( { "grid_mapping_name": "mercator", "longitude_of_projection_origin": 10, "standard_parallel": 21.354, "false_easting": 0, "false_northing": 0, } ) expected_cf = { "semi_major_axis": 6378137.0, "semi_minor_axis": crs.ellipsoid.semi_minor_metre, "inverse_flattening": crs.ellipsoid.inverse_flattening, "reference_ellipsoid_name": "WGS 84", "longitude_of_prime_meridian": 0.0, "prime_meridian_name": "Greenwich", "horizontal_datum_name": "World Geodetic System 1984 ensemble", "grid_mapping_name": "mercator", "standard_parallel": 21.354, "longitude_of_projection_origin": 10.0, "false_easting": 0.0, "false_northing": 0.0, "geographic_crs_name": "undefined", "projected_crs_name": "undefined", } with 
pytest.warns(UserWarning): assert crs.to_dict() == { "datum": "WGS84", "lat_ts": 21.354, "lon_0": 10, "no_defs": None, "proj": "merc", "type": "crs", "units": "m", "x_0": 0, "y_0": 0, } cf_dict = crs.to_cf() assert cf_dict.pop("crs_wkt").startswith("PROJCRS[") assert cf_dict == expected_cf # test roundtrip _test_roundtrip(expected_cf, "PROJCRS[") # test coordinate system assert crs.cs_to_cf() == [ { "axis": "X", "long_name": "Easting", "standard_name": "projection_x_coordinate", "units": "metre", }, { "axis": "Y", "long_name": "Northing", "standard_name": "projection_y_coordinate", "units": "metre", }, ] def test_osgb_1936(): crs = CRS("OSGB 1936 / British National Grid") param_dict = _to_dict(crs.coordinate_operation) expected_cf = { "semi_major_axis": crs.ellipsoid.semi_major_metre, "semi_minor_axis": crs.ellipsoid.semi_minor_metre, "inverse_flattening": crs.ellipsoid.inverse_flattening, "reference_ellipsoid_name": "Airy 1830", "longitude_of_prime_meridian": 0.0, "prime_meridian_name": "Greenwich", "geographic_crs_name": "OSGB 1936", "horizontal_datum_name": "OSGB 1936", "projected_crs_name": "OSGB 1936 / British National Grid", "grid_mapping_name": "transverse_mercator", "latitude_of_projection_origin": 49.0, "longitude_of_central_meridian": -2.0, "false_easting": 400000.0, "false_northing": -100000.0, "scale_factor_at_central_meridian": param_dict[ "Scale factor at natural origin" ], } if PROJ_LOOSE_VERSION >= version.parse("8.0.1"): expected_cf.update( geographic_crs_name="OSGB36", horizontal_datum_name="Ordnance Survey of Great Britain 1936", projected_crs_name="OSGB36 / British National Grid", ) cf_dict = crs.to_cf() assert cf_dict.pop("crs_wkt").startswith("PROJCRS[") assert cf_dict == expected_cf # test roundtrip _test_roundtrip(expected_cf, "PROJCRS[") # test coordinate system assert crs.cs_to_cf() == [ { "axis": "X", "long_name": "Easting", "standard_name": "projection_x_coordinate", "units": "metre", }, { "axis": "Y", "long_name": "Northing", "standard_name": "projection_y_coordinate", "units": "metre", }, ] def test_export_compound_crs(): crs = CRS("urn:ogc:def:crs,crs:EPSG::2393,crs:EPSG::5717") expected_cf = { "semi_major_axis": 6378388.0, "semi_minor_axis": crs.ellipsoid.semi_minor_metre, "inverse_flattening": 297.0, "reference_ellipsoid_name": "International 1924", "longitude_of_prime_meridian": 0.0, "prime_meridian_name": "Greenwich", "geographic_crs_name": "KKJ", "horizontal_datum_name": "Kartastokoordinaattijarjestelma (1966)", "projected_crs_name": "KKJ / Finland Uniform Coordinate System", "grid_mapping_name": "transverse_mercator", "latitude_of_projection_origin": 0.0, "longitude_of_central_meridian": 27.0, "false_easting": 3500000.0, "false_northing": 0.0, "scale_factor_at_central_meridian": 1.0, "geopotential_datum_name": "Helsinki 1960", } cf_dict = crs.to_cf() assert cf_dict.pop("crs_wkt").startswith("COMPOUNDCRS[") assert cf_dict == expected_cf # test roundtrip _test_roundtrip(expected_cf, "COMPOUNDCRS[") # test coordinate system assert crs.cs_to_cf() == [ { "axis": "Y", "long_name": "Northing", "standard_name": "projection_y_coordinate", "units": "metre", }, { "axis": "X", "long_name": "Easting", "standard_name": "projection_x_coordinate", "units": "metre", }, { "standard_name": "height_above_reference_ellipsoid", "long_name": "Gravity-related height", "units": "metre", "positive": "up", "axis": "Z", }, ] def test_geoid_model_name(): wkt = ( 'COMPOUNDCRS["NAD83 / Pennsylvania South + NAVD88 height",\n' ' PROJCRS["NAD83 / Pennsylvania South",\n' ' 
BASEGEOGCRS["NAD83",\n' ' DATUM["North American Datum 1983",\n' ' ELLIPSOID["GRS 1980",6378137,298.257222101,\n' ' LENGTHUNIT["metre",1]]],\n' ' PRIMEM["Greenwich",0,\n' ' ANGLEUNIT["degree",0.0174532925199433]]],\n' ' CONVERSION["SPCS83 Pennsylvania South zone (meters)",\n' ' METHOD["Lambert Conic Conformal (2SP)",\n' ' ID["EPSG",9802]],\n' ' PARAMETER["Latitude of false origin",39.3333333333333,\n' ' ANGLEUNIT["degree",0.0174532925199433],\n' ' ID["EPSG",8821]],\n' ' PARAMETER["Longitude of false origin",-77.75,\n' ' ANGLEUNIT["degree",0.0174532925199433],\n' ' ID["EPSG",8822]],\n' ' PARAMETER["Latitude of 1st standard ' 'parallel",40.9666666666667,\n' ' ANGLEUNIT["degree",0.0174532925199433],\n' ' ID["EPSG",8823]],\n' ' PARAMETER["Latitude of 2nd standard ' 'parallel",39.9333333333333,\n' ' ANGLEUNIT["degree",0.0174532925199433],\n' ' ID["EPSG",8824]],\n' ' PARAMETER["Easting at false origin",600000,\n' ' LENGTHUNIT["metre",1],\n' ' ID["EPSG",8826]],\n' ' PARAMETER["Northing at false origin",0,\n' ' LENGTHUNIT["metre",1],\n' ' ID["EPSG",8827]]],\n' " CS[Cartesian,2],\n" ' AXIS["easting (X)",east,\n' " ORDER[1],\n" ' LENGTHUNIT["metre",1]],\n' ' AXIS["northing (Y)",north,\n' " ORDER[2],\n" ' LENGTHUNIT["metre",1]]],\n' ' VERTCRS["NAVD88 height",\n' ' VDATUM["North American Vertical Datum 1988"],\n' " CS[vertical,1],\n" ' AXIS["gravity-related height (H)",up,\n' ' LENGTHUNIT["metre",1]],\n' ' GEOIDMODEL["GEOID12B"]]]' ) crs = CRS(wkt) param_dict = _to_dict(crs.sub_crs_list[0].coordinate_operation) expected_cf = { "semi_major_axis": 6378137.0, "semi_minor_axis": crs.ellipsoid.semi_minor_metre, "inverse_flattening": crs.ellipsoid.inverse_flattening, "reference_ellipsoid_name": "GRS 1980", "longitude_of_prime_meridian": 0.0, "prime_meridian_name": "Greenwich", "geographic_crs_name": "NAD83", "horizontal_datum_name": "North American Datum 1983", "projected_crs_name": "NAD83 / Pennsylvania South", "grid_mapping_name": "lambert_conformal_conic", "standard_parallel": ( param_dict["Latitude of 1st standard parallel"], param_dict["Latitude of 2nd standard parallel"], ), "latitude_of_projection_origin": param_dict["Latitude of false origin"], "longitude_of_central_meridian": -77.75, "false_easting": 600000.0, "false_northing": 0.0, "geoid_name": "GEOID12B", "geopotential_datum_name": "North American Vertical Datum 1988", } cf_dict = crs.to_cf() assert cf_dict.pop("crs_wkt").startswith("COMPOUNDCRS[") assert cf_dict == expected_cf # test roundtrip _test_roundtrip(expected_cf, "COMPOUNDCRS[") assert crs.cs_to_cf() == [ { "axis": "X", "long_name": "Easting", "standard_name": "projection_x_coordinate", "units": "metre", }, { "axis": "Y", "long_name": "Northing", "standard_name": "projection_y_coordinate", "units": "metre", }, { "standard_name": "height_above_reference_ellipsoid", "long_name": "Gravity-related height", "units": "metre", "positive": "up", "axis": "Z", }, ] def test_albers_conical_equal_area(): crs = CRS("ESRI:102008") expected_cf = { "semi_major_axis": 6378137.0, "semi_minor_axis": crs.ellipsoid.semi_minor_metre, "inverse_flattening": crs.ellipsoid.inverse_flattening, "reference_ellipsoid_name": "GRS 1980", "longitude_of_prime_meridian": 0.0, "prime_meridian_name": "Greenwich", "geographic_crs_name": "NAD83", "horizontal_datum_name": "North American Datum 1983", "projected_crs_name": "North_America_Albers_Equal_Area_Conic", "grid_mapping_name": "albers_conical_equal_area", "standard_parallel": (20.0, 60.0), "latitude_of_projection_origin": 40.0, "longitude_of_central_meridian": -96.0, 
"false_easting": 0.0, "false_northing": 0.0, } cf_dict = crs.to_cf() assert cf_dict.pop("crs_wkt").startswith("PROJCRS[") assert cf_dict == expected_cf # test roundtrip _test_roundtrip(expected_cf, "PROJCRS[") # test coordinate system assert crs.cs_to_cf() == [ { "axis": "X", "long_name": "Easting", "standard_name": "projection_x_coordinate", "units": "metre", }, { "axis": "Y", "long_name": "Northing", "standard_name": "projection_y_coordinate", "units": "metre", }, ] def test_azimuthal_equidistant(): crs = CRS("ESRI:54032") expected_cf = { "semi_major_axis": 6378137.0, "semi_minor_axis": crs.ellipsoid.semi_minor_metre, "inverse_flattening": crs.ellipsoid.inverse_flattening, "reference_ellipsoid_name": "WGS 84", "longitude_of_prime_meridian": 0.0, "prime_meridian_name": "Greenwich", "geographic_crs_name": "WGS 84", "horizontal_datum_name": "World Geodetic System 1984", "projected_crs_name": "World_Azimuthal_Equidistant", "grid_mapping_name": "azimuthal_equidistant", "latitude_of_projection_origin": 0.0, "longitude_of_projection_origin": 0.0, "false_easting": 0.0, "false_northing": 0.0, } cf_dict = crs.to_cf() assert cf_dict.pop("crs_wkt").startswith("PROJCRS[") assert cf_dict == expected_cf # test roundtrip expected_cf["horizontal_datum_name"] = "World Geodetic System 1984 ensemble" _test_roundtrip(expected_cf, "PROJCRS[") # test coordinate system assert crs.cs_to_cf() == [ { "axis": "X", "long_name": "Easting", "standard_name": "projection_x_coordinate", "units": "metre", }, { "axis": "Y", "long_name": "Northing", "standard_name": "projection_y_coordinate", "units": "metre", }, ] def test_lambert_azimuthal_equal_area(): crs = ProjectedCRS(conversion=LambertAzimuthalEqualAreaConversion(1, 2, 3, 4)) expected_cf = { "semi_major_axis": 6378137.0, "semi_minor_axis": crs.ellipsoid.semi_minor_metre, "inverse_flattening": crs.ellipsoid.inverse_flattening, "reference_ellipsoid_name": "WGS 84", "longitude_of_prime_meridian": 0.0, "prime_meridian_name": "Greenwich", "horizontal_datum_name": "World Geodetic System 1984 ensemble", "grid_mapping_name": "lambert_azimuthal_equal_area", "latitude_of_projection_origin": 1.0, "longitude_of_projection_origin": 2.0, "false_easting": 3.0, "false_northing": 4.0, "geographic_crs_name": "undefined", "projected_crs_name": "undefined", } cf_dict = crs.to_cf() assert cf_dict.pop("crs_wkt").startswith("PROJCRS[") assert cf_dict == expected_cf # test roundtrip _test_roundtrip(expected_cf, "PROJCRS[") # test coordinate system assert crs.cs_to_cf() == [ { "axis": "X", "long_name": "Easting", "standard_name": "projection_x_coordinate", "units": "metre", }, { "axis": "Y", "long_name": "Northing", "standard_name": "projection_y_coordinate", "units": "metre", }, ] def test_lambert_cylindrical_equal_area(): crs = ProjectedCRS(conversion=LambertCylindricalEqualAreaConversion(1, 2, 3, 4)) expected_cf = { "semi_major_axis": 6378137.0, "semi_minor_axis": crs.ellipsoid.semi_minor_metre, "inverse_flattening": crs.ellipsoid.inverse_flattening, "reference_ellipsoid_name": "WGS 84", "longitude_of_prime_meridian": 0.0, "prime_meridian_name": "Greenwich", "horizontal_datum_name": "World Geodetic System 1984 ensemble", "grid_mapping_name": "lambert_cylindrical_equal_area", "standard_parallel": 1.0, "longitude_of_central_meridian": 2.0, "false_easting": 3.0, "false_northing": 4.0, "geographic_crs_name": "undefined", "projected_crs_name": "undefined", } cf_dict = crs.to_cf() assert cf_dict.pop("crs_wkt").startswith("PROJCRS[") assert cf_dict == expected_cf # test roundtrip 
_test_roundtrip(expected_cf, "PROJCRS[") # test coordinate system assert crs.cs_to_cf() == [ { "axis": "X", "long_name": "Easting", "standard_name": "projection_x_coordinate", "units": "metre", }, { "axis": "Y", "long_name": "Northing", "standard_name": "projection_y_coordinate", "units": "metre", }, ] def test_mercator_a(): crs = ProjectedCRS(conversion=MercatorAConversion(0, 2, 3, 4)) expected_cf = { "semi_major_axis": 6378137.0, "semi_minor_axis": crs.ellipsoid.semi_minor_metre, "inverse_flattening": crs.ellipsoid.inverse_flattening, "reference_ellipsoid_name": "WGS 84", "longitude_of_prime_meridian": 0.0, "prime_meridian_name": "Greenwich", "horizontal_datum_name": "World Geodetic System 1984 ensemble", "grid_mapping_name": "mercator", "standard_parallel": 0.0, "longitude_of_projection_origin": 2.0, "false_easting": 3.0, "false_northing": 4.0, "scale_factor_at_projection_origin": 1.0, "geographic_crs_name": "undefined", "projected_crs_name": "undefined", } cf_dict = crs.to_cf() assert cf_dict.pop("crs_wkt").startswith("PROJCRS[") assert cf_dict == expected_cf # test roundtrip _test_roundtrip(expected_cf, "PROJCRS[") # test coordinate system assert crs.cs_to_cf() == [ { "axis": "X", "long_name": "Easting", "standard_name": "projection_x_coordinate", "units": "metre", }, { "axis": "Y", "long_name": "Northing", "standard_name": "projection_y_coordinate", "units": "metre", }, ] def test_orthographic(): crs = ProjectedCRS(conversion=OrthographicConversion(1, 2, 3, 4)) expected_cf = { "semi_major_axis": 6378137.0, "semi_minor_axis": crs.ellipsoid.semi_minor_metre, "inverse_flattening": crs.ellipsoid.inverse_flattening, "reference_ellipsoid_name": "WGS 84", "longitude_of_prime_meridian": 0.0, "prime_meridian_name": "Greenwich", "horizontal_datum_name": "World Geodetic System 1984 ensemble", "grid_mapping_name": "orthographic", "latitude_of_projection_origin": 1.0, "longitude_of_projection_origin": 2.0, "false_easting": 3.0, "false_northing": 4.0, "geographic_crs_name": "undefined", "projected_crs_name": "undefined", } cf_dict = crs.to_cf() assert cf_dict.pop("crs_wkt").startswith("PROJCRS[") assert cf_dict == expected_cf # test roundtrip _test_roundtrip(expected_cf, "PROJCRS[") # test coordinate system assert crs.cs_to_cf() == [ { "axis": "X", "long_name": "Easting", "standard_name": "projection_x_coordinate", "units": "metre", }, { "axis": "Y", "long_name": "Northing", "standard_name": "projection_y_coordinate", "units": "metre", }, ] def test_polar_stereographic_a(): crs = ProjectedCRS(conversion=PolarStereographicAConversion(90, 1, 2, 3)) expected_cf = { "semi_major_axis": 6378137.0, "semi_minor_axis": crs.ellipsoid.semi_minor_metre, "inverse_flattening": crs.ellipsoid.inverse_flattening, "reference_ellipsoid_name": "WGS 84", "longitude_of_prime_meridian": 0.0, "prime_meridian_name": "Greenwich", "horizontal_datum_name": "World Geodetic System 1984 ensemble", "grid_mapping_name": "polar_stereographic", "latitude_of_projection_origin": 90.0, "straight_vertical_longitude_from_pole": 1.0, "false_easting": 2.0, "false_northing": 3.0, "scale_factor_at_projection_origin": 1.0, "geographic_crs_name": "undefined", "projected_crs_name": "undefined", } cf_dict = crs.to_cf() assert cf_dict.pop("crs_wkt").startswith("PROJCRS[") assert cf_dict == expected_cf # test roundtrip _test_roundtrip(expected_cf, "PROJCRS[") # test coordinate system assert crs.cs_to_cf() == [ { "axis": "X", "long_name": "Easting", "standard_name": "projection_x_coordinate", "units": "metre", }, { "axis": "Y", "long_name": 
"Northing", "standard_name": "projection_y_coordinate", "units": "metre", }, ] def test_polar_stereographic_b(): crs = ProjectedCRS(conversion=PolarStereographicBConversion(0, 1, 2, 3)) expected_cf = { "semi_major_axis": 6378137.0, "semi_minor_axis": crs.ellipsoid.semi_minor_metre, "inverse_flattening": crs.ellipsoid.inverse_flattening, "reference_ellipsoid_name": "WGS 84", "longitude_of_prime_meridian": 0.0, "prime_meridian_name": "Greenwich", "horizontal_datum_name": "World Geodetic System 1984 ensemble", "grid_mapping_name": "polar_stereographic", "standard_parallel": 0.0, "straight_vertical_longitude_from_pole": 1.0, "false_easting": 2.0, "false_northing": 3.0, "geographic_crs_name": "undefined", "projected_crs_name": "undefined", } cf_dict = crs.to_cf() assert cf_dict.pop("crs_wkt").startswith("PROJCRS[") assert cf_dict == expected_cf # test roundtrip _test_roundtrip(expected_cf, "PROJCRS[") # test coordinate system assert crs.cs_to_cf() == [ { "axis": "X", "long_name": "Easting", "standard_name": "projection_x_coordinate", "units": "metre", }, { "axis": "Y", "long_name": "Northing", "standard_name": "projection_y_coordinate", "units": "metre", }, ] def test_stereographic(): crs = ProjectedCRS(conversion=StereographicConversion(0, 1, 2, 3)) expected_cf = { "semi_major_axis": 6378137.0, "semi_minor_axis": crs.ellipsoid.semi_minor_metre, "inverse_flattening": crs.ellipsoid.inverse_flattening, "reference_ellipsoid_name": "WGS 84", "longitude_of_prime_meridian": 0.0, "prime_meridian_name": "Greenwich", "horizontal_datum_name": "World Geodetic System 1984 ensemble", "grid_mapping_name": "stereographic", "latitude_of_projection_origin": 0.0, "longitude_of_projection_origin": 1.0, "false_easting": 2.0, "false_northing": 3.0, "scale_factor_at_projection_origin": 1.0, "geographic_crs_name": "undefined", "projected_crs_name": "undefined", } cf_dict = crs.to_cf() assert cf_dict.pop("crs_wkt").startswith("PROJCRS[") assert cf_dict == expected_cf # test roundtrip _test_roundtrip(expected_cf, "PROJCRS[") # test coordinate system assert crs.cs_to_cf() == [ { "axis": "X", "long_name": "Easting", "standard_name": "projection_x_coordinate", "units": "metre", }, { "axis": "Y", "long_name": "Northing", "standard_name": "projection_y_coordinate", "units": "metre", }, ] def test_sinusoidal(): crs = ProjectedCRS(conversion=SinusoidalConversion(0, 1, 2)) expected_cf = { "semi_major_axis": 6378137.0, "semi_minor_axis": crs.ellipsoid.semi_minor_metre, "inverse_flattening": crs.ellipsoid.inverse_flattening, "reference_ellipsoid_name": "WGS 84", "longitude_of_prime_meridian": 0.0, "prime_meridian_name": "Greenwich", "horizontal_datum_name": "World Geodetic System 1984 ensemble", "grid_mapping_name": "sinusoidal", "longitude_of_projection_origin": 0.0, "false_easting": 1.0, "false_northing": 2.0, "geographic_crs_name": "undefined", "projected_crs_name": "undefined", } cf_dict = crs.to_cf() assert cf_dict.pop("crs_wkt").startswith("PROJCRS[") assert cf_dict == expected_cf # test roundtrip _test_roundtrip(expected_cf, "PROJCRS[") # test coordinate system assert crs.cs_to_cf() == [ { "axis": "X", "long_name": "Easting", "standard_name": "projection_x_coordinate", "units": "metre", }, { "axis": "Y", "long_name": "Northing", "standard_name": "projection_y_coordinate", "units": "metre", }, ] def test_vertical_perspective(): crs = ProjectedCRS(conversion=VerticalPerspectiveConversion(50, 0, 1, 0, 2, 3)) expected_cf = { "semi_major_axis": 6378137.0, "semi_minor_axis": crs.ellipsoid.semi_minor_metre, "inverse_flattening": 
crs.ellipsoid.inverse_flattening, "reference_ellipsoid_name": "WGS 84", "longitude_of_prime_meridian": 0.0, "prime_meridian_name": "Greenwich", "horizontal_datum_name": "World Geodetic System 1984 ensemble", "grid_mapping_name": "vertical_perspective", "perspective_point_height": 50.0, "latitude_of_projection_origin": 0.0, "longitude_of_projection_origin": 1.0, "false_easting": 2.0, "false_northing": 3.0, "geographic_crs_name": "undefined", "projected_crs_name": "undefined", } cf_dict = crs.to_cf() assert cf_dict.pop("crs_wkt").startswith("PROJCRS[") assert cf_dict == expected_cf # test roundtrip _test_roundtrip(expected_cf, "PROJCRS[") # test coordinate system assert crs.cs_to_cf() == [ { "axis": "X", "long_name": "Easting", "standard_name": "projection_x_coordinate", "units": "metre", }, { "axis": "Y", "long_name": "Northing", "standard_name": "projection_y_coordinate", "units": "metre", }, ] def test_build_custom_datum(): cf_dict = { "semi_major_axis": 6370997.0, "semi_minor_axis": 6370997.0, "inverse_flattening": 0.0, "reference_ellipsoid_name": "Normal Sphere (r=6370997)", "longitude_of_prime_meridian": 1.0, "grid_mapping_name": "oblique_mercator", "latitude_of_projection_origin": 0.0, "longitude_of_projection_origin": 13.809602948622212, "azimuth_of_central_line": 8.998112717187938, "scale_factor_at_projection_origin": 1.0, "false_easting": 0.0, "false_northing": 0.0, } crs = CRS.from_cf(cf_dict) assert crs.datum.name == "undefined" assert crs.ellipsoid.name == "Normal Sphere (r=6370997)" assert crs.prime_meridian.name == "undefined" assert crs.prime_meridian.longitude == 1 def test_build_custom_datum__default_prime_meridian(): cf_dict = { "semi_major_axis": 6370997.0, "semi_minor_axis": 6370997.0, "inverse_flattening": 0.0, "grid_mapping_name": "oblique_mercator", "latitude_of_projection_origin": 0.0, "longitude_of_projection_origin": 13.809602948622212, "azimuth_of_central_line": 8.998112717187938, "scale_factor_at_projection_origin": 1.0, "false_easting": 0.0, "false_northing": 0.0, } crs = CRS.from_cf(cf_dict) assert crs.datum.name == "undefined" assert crs.ellipsoid.name == "undefined" assert crs.prime_meridian.name == "Greenwich" assert crs.prime_meridian.longitude == 0 def test_build_custom_datum__default_ellipsoid(): cf_dict = { "prime_meridian_name": "Paris", "grid_mapping_name": "oblique_mercator", "latitude_of_projection_origin": 0.0, "longitude_of_projection_origin": 13.809602948622212, "azimuth_of_central_line": 8.998112717187938, "scale_factor_at_projection_origin": 1.0, "false_easting": 0.0, "false_northing": 0.0, } crs = CRS.from_cf(cf_dict) assert crs.datum.name == "undefined" assert crs.ellipsoid.name == "WGS 84" assert crs.prime_meridian.name == "Paris" assert str(crs.prime_meridian.longitude).startswith("2.") def test_cartesian_cs(): unit = {"type": "LinearUnit", "name": "US Survey Foot", "conversion_factor": 0.3048} cartesian_cs = { "type": "CoordinateSystem", "subtype": "Cartesian", "axis": [ {"name": "Easting", "abbreviation": "E", "direction": "east", "unit": unit}, { "name": "Northing", "abbreviation": "N", "direction": "north", "unit": unit, }, ], } crs = CRS.from_cf( { "grid_mapping_name": "transverse_mercator", "semi_major_axis": 6377563.396, "inverse_flattening": 299.3249646, "longitude_of_prime_meridian": 0.0, "latitude_of_projection_origin": 49.0, "longitude_of_central_meridian": -2.0, "scale_factor_at_central_meridian": 0.9996012717, "false_easting": 400000.0, "false_northing": -100000.0, }, cartesian_cs=cartesian_cs, ) json_dict = 
crs.coordinate_system.to_json_dict() json_dict.pop("$schema") assert json_dict == cartesian_cs # test coordinate system assert crs.cs_to_cf() == [ { "axis": "X", "long_name": "Easting", "standard_name": "projection_x_coordinate", "units": "0.3048 metre", }, { "axis": "Y", "long_name": "Northing", "standard_name": "projection_y_coordinate", "units": "0.3048 metre", }, ] def test_ellipsoidal_cs(): ellipsoidal_cs = { "type": "CoordinateSystem", "subtype": "ellipsoidal", "axis": [ { "name": "Latitude", "abbreviation": "lat", "direction": "north", "unit": "degree", }, { "name": "Longitude", "abbreviation": "lon", "direction": "east", "unit": "degree", }, ], } crs = CRS.from_cf( dict( grid_mapping_name="latitude_longitude", semi_major_axis=6378137.0, inverse_flattening=298.257223, ), ellipsoidal_cs=ellipsoidal_cs, ) json_dict = crs.coordinate_system.to_json_dict() json_dict.pop("$schema") assert json_dict == ellipsoidal_cs # test coordinate system assert crs.cs_to_cf() == [ { "standard_name": "latitude", "long_name": "latitude coordinate", "units": "degrees_north", "axis": "Y", }, { "standard_name": "longitude", "long_name": "longitude coordinate", "units": "degrees_east", "axis": "X", }, ] def test_ellipsoidal_cs__from_name(): ellipsoidal_cs = { "type": "CoordinateSystem", "subtype": "ellipsoidal", "axis": [ { "name": "Longitude", "abbreviation": "lon", "direction": "east", "unit": "degree", }, { "name": "Latitude", "abbreviation": "lat", "direction": "north", "unit": "degree", }, ], } crs = CRS.from_cf( dict(grid_mapping_name="latitude_longitude", geographic_crs_name="WGS 84"), ellipsoidal_cs=ellipsoidal_cs, ) json_dict = crs.coordinate_system.to_json_dict() json_dict.pop("$schema") assert json_dict == ellipsoidal_cs # test coordinate system assert crs.cs_to_cf() == [ { "standard_name": "longitude", "long_name": "longitude coordinate", "units": "degrees_east", "axis": "X", }, { "standard_name": "latitude", "long_name": "latitude coordinate", "units": "degrees_north", "axis": "Y", }, ] def test_export_compound_crs_cs(): unit = {"type": "LinearUnit", "name": "US Survey Foot", "conversion_factor": 0.3048} cartesian_cs = { "type": "CoordinateSystem", "subtype": "Cartesian", "axis": [ { "name": "Northing", "abbreviation": "N", "direction": "north", "unit": unit, }, {"name": "Easting", "abbreviation": "E", "direction": "east", "unit": unit}, ], } vertical_cs = { "type": "CoordinateSystem", "subtype": "vertical", "axis": [ { "name": "Gravity-related height", "abbreviation": "H", "direction": "up", "unit": unit, } ], } crs = CRS.from_cf( { "semi_major_axis": 6378388.0, "semi_minor_axis": 6356911.9461279465, "inverse_flattening": 297.0, "reference_ellipsoid_name": "International 1924", "longitude_of_prime_meridian": 0.0, "prime_meridian_name": "Greenwich", "geographic_crs_name": "KKJ", "horizontal_datum_name": "Kartastokoordinaattijarjestelma (1966)", "projected_crs_name": "KKJ / Finland Uniform Coordinate System", "grid_mapping_name": "transverse_mercator", "latitude_of_projection_origin": 0.0, "longitude_of_central_meridian": 27.0, "false_easting": 3500000.0, "false_northing": 0.0, "scale_factor_at_central_meridian": 1.0, "geopotential_datum_name": "Helsinki 1960", }, cartesian_cs=cartesian_cs, vertical_cs=vertical_cs, ) cartesian_json_dict = crs.sub_crs_list[0].coordinate_system.to_json_dict() cartesian_json_dict.pop("$schema") vertical_json_dict = crs.sub_crs_list[1].coordinate_system.to_json_dict() vertical_json_dict.pop("$schema") assert cartesian_json_dict == cartesian_cs assert 
vertical_json_dict == vertical_cs # test coordinate system assert crs.cs_to_cf() == [ { "axis": "Y", "long_name": "Northing", "standard_name": "projection_y_coordinate", "units": "0.3048 metre", }, { "axis": "X", "long_name": "Easting", "standard_name": "projection_x_coordinate", "units": "0.3048 metre", }, { "standard_name": "height_above_reference_ellipsoid", "long_name": "Gravity-related height", "units": "0.3048 metre", "positive": "up", "axis": "Z", }, ] def test_ellipsoidal_cs__geodetic(): crs = CRS.from_epsg(4326) assert crs.cs_to_cf() == [ { "standard_name": "latitude", "long_name": "latitude coordinate", "units": "degrees_north", "axis": "Y", }, { "standard_name": "longitude", "long_name": "longitude coordinate", "units": "degrees_east", "axis": "X", }, ] def test_3d_ellipsoidal_cs_depth(): crs = CRS( { "$schema": "https://proj.org/schemas/v0.2/projjson.schema.json", "type": "GeographicCRS", "name": "WGS 84 (geographic 3D)", "datum": { "type": "GeodeticReferenceFrame", "name": "World Geodetic System 1984 ensemble", "ellipsoid": { "name": "WGS 84", "semi_major_axis": 6378137, "inverse_flattening": 298.257223563, }, }, "coordinate_system": { "subtype": "ellipsoidal", "axis": [ { "name": "Geodetic latitude", "abbreviation": "Lat", "direction": "north", "unit": { "type": "AngularUnit", "name": "degree minute second hemisphere", "conversion_factor": 0.0174532925199433, }, }, { "name": "Geodetic longitude", "abbreviation": "Long", "direction": "east", "unit": { "type": "AngularUnit", "name": "degree minute second hemisphere", "conversion_factor": 0.0174532925199433, }, }, { "name": "Ellipsoidal depth", "abbreviation": "d", "direction": "down", "unit": "metre", }, ], }, "area": "World", "bbox": { "south_latitude": -90, "west_longitude": -180, "north_latitude": 90, "east_longitude": 180, }, } ) assert crs.cs_to_cf() == [ { "standard_name": "latitude", "long_name": "latitude coordinate", "units": "degrees_north", "axis": "Y", }, { "standard_name": "longitude", "long_name": "longitude coordinate", "units": "degrees_east", "axis": "X", }, { "standard_name": "height_above_reference_ellipsoid", "long_name": "Ellipsoidal depth", "units": "metre", "positive": "down", "axis": "Z", }, ] def test_decimal_year_temporal_crs__coordinate_system(): crs = CRS( 'TIMECRS["Decimal Years CE",\n' ' TDATUM["Common Era",\n' ' CALENDAR["proleptic Gregorian"],\n' " TIMEORIGIN[0000]],\n" " CS[TemporalMeasure,1],\n" ' AXIS["decimal years (a)",future,\n' ' TIMEUNIT["year"]]]' ) assert crs.cs_to_cf() == [ { "standard_name": "time", "long_name": "time", "calendar": "proleptic_gregorian", "units": "year since 0000-01-01", "axis": "T", } ] def test_datetime_temporal_crs__coordinate_system(): crs = CRS( "TIMECRS[“DateTime”," "TDATUM[“Gregorian Calendar”]," 'CS[TemporalDateTime,1],AXIS["Time (T)",future]]' ) assert crs.cs_to_cf() == [ { "standard_name": "time", "long_name": "time", "calendar": "proleptic_gregorian", "axis": "T", } ] def test_count_temporal_crs__coordinate_system(): crs = CRS( "TIMECRS[“Calendar hours from 1979-12-29”," "TDATUM[“29 December 1979”,TIMEORIGIN[1979-12-29T00Z]]," "CS[TemporalCount,1],AXIS[“Time”,future,TIMEUNIT[“hour”]]]" ) assert crs.cs_to_cf() == [ { "standard_name": "time", "long_name": "time", "calendar": "proleptic_gregorian", "units": "hour since 1979-12-29T00", "axis": "T", } ] def test_unix_temporal_crs__coordinate_system(): crs = CRS( "TIMECRS[“Unix time”," "TDATUM[“Unix epoch”,TIMEORIGIN[1970-01-01T00:00:00Z]]," "CS[TemporalCount,1],AXIS[“Time”,future,TIMEUNIT[“second”]]]" ) assert 
crs.cs_to_cf() == [ { "standard_name": "time", "long_name": "time", "calendar": "proleptic_gregorian", "units": "second since 1970-01-01T00:00:00", "axis": "T", } ] def test_milisecond_temporal_crs__coordinate_system(): crs = CRS( 'TIMECRS["GPS milliseconds",' 'TDATUM["GPS time origin",TIMEORIGIN[1980-01-01T00:00:00.0Z]],' "CS[TemporalCount,1]," 'AXIS["(T)",future,TIMEUNIT["millisecond",0.001]]]' ) assert crs.cs_to_cf() == [ { "standard_name": "time", "long_name": "time", "calendar": "proleptic_gregorian", "units": "millisecond since 1980-01-01T00:00:00.0", "axis": "T", } ] pyproj-3.7.1/test/crs/test_crs_coordinate_operation.py000066400000000000000000000637011475425760300233260ustar00rootroot00000000000000import pytest from numpy.testing import assert_almost_equal from pyproj.crs import GeographicCRS from pyproj.crs.coordinate_operation import ( AlbersEqualAreaConversion, AzimuthalEquidistantConversion, EquidistantCylindricalConversion, GeostationarySatelliteConversion, HotineObliqueMercatorBConversion, LambertAzimuthalEqualAreaConversion, LambertConformalConic1SPConversion, LambertConformalConic2SPConversion, LambertCylindricalEqualAreaConversion, LambertCylindricalEqualAreaScaleConversion, MercatorAConversion, MercatorBConversion, OrthographicConversion, PlateCarreeConversion, PolarStereographicAConversion, PolarStereographicBConversion, PoleRotationNetCDFCFConversion, RotatedLatitudeLongitudeConversion, SinusoidalConversion, StereographicConversion, ToWGS84Transformation, TransverseMercatorConversion, UTMConversion, VerticalPerspectiveConversion, ) from pyproj.exceptions import CRSError from test.conftest import PROJ_GTE_95 def _to_dict(operation): param_dict = {} for param in operation.params: param_dict[param.name] = param.value return param_dict def test_albers_equal_area_operation__defaults(): aeaop = AlbersEqualAreaConversion( latitude_first_parallel=1, latitude_second_parallel=2 ) assert aeaop.name == "unknown" assert aeaop.method_name == "Albers Equal Area" assert _to_dict(aeaop) == { "Easting at false origin": 0.0, "Latitude of 1st standard parallel": 1.0, "Latitude of 2nd standard parallel": 2.0, "Latitude of false origin": 0.0, "Longitude of false origin": 0.0, "Northing at false origin": 0.0, } def test_albers_equal_area_operation(): aeaop = AlbersEqualAreaConversion( latitude_first_parallel=1, latitude_second_parallel=2, latitude_false_origin=3, longitude_false_origin=4, easting_false_origin=5, northing_false_origin=6, ) assert aeaop.name == "unknown" assert aeaop.method_name == "Albers Equal Area" assert _to_dict(aeaop) == { "Easting at false origin": 5.0, "Latitude of 1st standard parallel": 1.0, "Latitude of 2nd standard parallel": 2.0, "Latitude of false origin": 3.0, "Longitude of false origin": 4.0, "Northing at false origin": 6.0, } def test_azimuthal_equidistant_operation__defaults(): aeop = AzimuthalEquidistantConversion() assert aeop.name == "unknown" assert aeop.method_name == "Modified Azimuthal Equidistant" assert _to_dict(aeop) == { "Latitude of natural origin": 0.0, "Longitude of natural origin": 0.0, "False easting": 0.0, "False northing": 0.0, } def test_azimuthal_equidistant_operation(): aeop = AzimuthalEquidistantConversion( latitude_natural_origin=1, longitude_natural_origin=2, false_easting=3, false_northing=4, ) assert aeop.name == "unknown" assert aeop.method_name == "Modified Azimuthal Equidistant" assert _to_dict(aeop) == { "Latitude of natural origin": 1.0, "Longitude of natural origin": 2.0, "False easting": 3.0, "False northing": 4.0, } def 
test_geostationary_operation__defaults(): geop = GeostationarySatelliteConversion(sweep_angle_axis="x", satellite_height=10) assert geop.name == "unknown" assert geop.method_name == "Geostationary Satellite (Sweep X)" assert _to_dict(geop) == { "Latitude of natural origin": 0.0, "Longitude of natural origin": 0.0, "False easting": 0.0, "False northing": 0.0, "Satellite height": 10.0, } def test_geostationary_operation(): with pytest.warns(UserWarning): geop = GeostationarySatelliteConversion( sweep_angle_axis="y", satellite_height=11, latitude_natural_origin=1, longitude_natural_origin=2, false_easting=3, false_northing=4, ) assert geop.name == "unknown" assert geop.method_name == "Geostationary Satellite (Sweep Y)" assert _to_dict(geop) == { "Latitude of natural origin": 1.0, "Longitude of natural origin": 2.0, "False easting": 3.0, "False northing": 4.0, "Satellite height": 11.0, } def test_geostationary_operation__invalid_sweep(): with pytest.raises(CRSError): GeostationarySatelliteConversion(sweep_angle_axis="P", satellite_height=10) def test_lambert_azimuthal_equal_area_operation__defaults(): aeop = LambertAzimuthalEqualAreaConversion() assert aeop.name == "unknown" assert aeop.method_name == "Lambert Azimuthal Equal Area" assert _to_dict(aeop) == { "Latitude of natural origin": 0.0, "Longitude of natural origin": 0.0, "False easting": 0.0, "False northing": 0.0, } def test_lambert_azimuthal_equal_area_operation(): aeop = LambertAzimuthalEqualAreaConversion( latitude_natural_origin=1, longitude_natural_origin=2, false_easting=3, false_northing=4, ) assert aeop.name == "unknown" assert aeop.method_name == "Lambert Azimuthal Equal Area" assert _to_dict(aeop) == { "Latitude of natural origin": 1.0, "Longitude of natural origin": 2.0, "False easting": 3.0, "False northing": 4.0, } def test_lambert_conformat_conic_2sp_operation__defaults(): aeaop = LambertConformalConic2SPConversion( latitude_first_parallel=1, latitude_second_parallel=2 ) assert aeaop.name == "unknown" assert aeaop.method_name == "Lambert Conic Conformal (2SP)" assert _to_dict(aeaop) == { "Easting at false origin": 0.0, "Latitude of 1st standard parallel": 1.0, "Latitude of 2nd standard parallel": 2.0, "Latitude of false origin": 0.0, "Longitude of false origin": 0.0, "Northing at false origin": 0.0, } def test_lambert_conformat_conic_2sp_operation(): aeaop = LambertConformalConic2SPConversion( latitude_first_parallel=1, latitude_second_parallel=2, latitude_false_origin=3, longitude_false_origin=4, easting_false_origin=5, northing_false_origin=6, ) assert aeaop.name == "unknown" assert aeaop.method_name == "Lambert Conic Conformal (2SP)" assert _to_dict(aeaop) == { "Easting at false origin": 5.0, "Latitude of 1st standard parallel": 1.0, "Latitude of 2nd standard parallel": 2.0, "Latitude of false origin": 3.0, "Longitude of false origin": 4.0, "Northing at false origin": 6.0, } def test_lambert_conformat_conic_1sp_operation__defaults(): aeaop = LambertConformalConic1SPConversion() assert aeaop.name == "unknown" assert aeaop.method_name == "Lambert Conic Conformal (1SP)" assert _to_dict(aeaop) == { "Latitude of natural origin": 0.0, "Longitude of natural origin": 0.0, "False easting": 0.0, "False northing": 0.0, "Scale factor at natural origin": 1.0, } def test_lambert_conformat_conic_1sp_operation(): aeaop = LambertConformalConic1SPConversion( latitude_natural_origin=1, longitude_natural_origin=2, false_easting=3, false_northing=4, scale_factor_natural_origin=0.5, ) assert aeaop.name == "unknown" assert aeaop.method_name 
== "Lambert Conic Conformal (1SP)" assert _to_dict(aeaop) == { "Latitude of natural origin": 1.0, "Longitude of natural origin": 2.0, "False easting": 3.0, "False northing": 4.0, "Scale factor at natural origin": 0.5, } def test_lambert_cylindrical_area_operation__defaults(): lceaop = LambertCylindricalEqualAreaConversion() assert lceaop.name == "unknown" assert lceaop.method_name == "Lambert Cylindrical Equal Area" assert _to_dict(lceaop) == { "Latitude of 1st standard parallel": 0.0, "Longitude of natural origin": 0.0, "False easting": 0.0, "False northing": 0.0, } def test_lambert_cylindrical_equal_area_operation(): lceaop = LambertCylindricalEqualAreaConversion( latitude_first_parallel=1, longitude_natural_origin=2, false_easting=3, false_northing=4, ) assert lceaop.name == "unknown" assert lceaop.method_name == "Lambert Cylindrical Equal Area" assert _to_dict(lceaop) == { "Latitude of 1st standard parallel": 1.0, "Longitude of natural origin": 2.0, "False easting": 3.0, "False northing": 4.0, } def test_mercator_a_operation__defaults(): aeaop = MercatorAConversion() assert aeaop.name == "unknown" assert aeaop.method_name == "Mercator (variant A)" assert _to_dict(aeaop) == { "Latitude of natural origin": 0.0, "Longitude of natural origin": 0.0, "False easting": 0.0, "False northing": 0.0, "Scale factor at natural origin": 1.0, } def test_mercator_a_operation(): aeaop = MercatorAConversion( latitude_natural_origin=0, longitude_natural_origin=2, false_easting=3, false_northing=4, scale_factor_natural_origin=0.5, ) assert aeaop.name == "unknown" assert aeaop.method_name == "Mercator (variant A)" assert _to_dict(aeaop) == { "Latitude of natural origin": 0.0, "Longitude of natural origin": 2.0, "False easting": 3.0, "False northing": 4.0, "Scale factor at natural origin": 0.5, } def test_mercator_a_operation__invalid_lat0(): with pytest.raises(CRSError): MercatorAConversion(latitude_natural_origin=1) def test_mercator_b_operation__defaults(): lceaop = MercatorBConversion() assert lceaop.name == "unknown" assert lceaop.method_name == "Mercator (variant B)" assert _to_dict(lceaop) == { "Latitude of 1st standard parallel": 0.0, "Longitude of natural origin": 0.0, "False easting": 0.0, "False northing": 0.0, } def test_mercator_b_operation(): lceaop = MercatorBConversion( latitude_first_parallel=1, longitude_natural_origin=2, false_easting=3, false_northing=4, ) assert lceaop.name == "unknown" assert lceaop.method_name == "Mercator (variant B)" assert _to_dict(lceaop) == { "Latitude of 1st standard parallel": 1.0, "Longitude of natural origin": 2.0, "False easting": 3.0, "False northing": 4.0, } def test_hotline_oblique_mercator_b_operation__defaults(): hop = HotineObliqueMercatorBConversion( latitude_projection_centre=0, longitude_projection_centre=0, azimuth_projection_centre=0, angle_from_rectified_to_skew_grid=0, ) assert hop.name == "unknown" assert hop.method_name == "Hotine Oblique Mercator (variant B)" assert _to_dict(hop) == { "Latitude of projection centre": 0.0, "Longitude of projection centre": 0.0, ( "Azimuth at projection centre" if PROJ_GTE_95 else "Azimuth of initial line" ): 0.0, "Angle from Rectified to Skew Grid": 0.0, ( "Scale factor at projection centre" if PROJ_GTE_95 else "Scale factor on initial line" ): 1.0, "Easting at projection centre": 0.0, "Northing at projection centre": 0.0, } def test_hotline_oblique_mercator_b_operation(): hop = HotineObliqueMercatorBConversion( latitude_projection_centre=1, longitude_projection_centre=2, azimuth_projection_centre=3, 
angle_from_rectified_to_skew_grid=4, scale_factor_projection_centre=0.5, easting_projection_centre=6, northing_projection_centre=7, ) assert hop.name == "unknown" assert hop.method_name == "Hotine Oblique Mercator (variant B)" assert _to_dict(hop) == { "Latitude of projection centre": 1.0, "Longitude of projection centre": 2.0, ( "Azimuth at projection centre" if PROJ_GTE_95 else "Azimuth of initial line" ): 3.0, "Angle from Rectified to Skew Grid": 4.0, ( "Scale factor at projection centre" if PROJ_GTE_95 else "Scale factor on initial line" ): 0.5, "Easting at projection centre": 6.0, "Northing at projection centre": 7.0, } def test_hotline_oblique_mercator_b_operation__deprecated_kwargs(): with pytest.warns(FutureWarning): hop = HotineObliqueMercatorBConversion( latitude_projection_centre=1, longitude_projection_centre=2, azimuth_initial_line=3, angle_from_rectified_to_skew_grid=4, scale_factor_on_initial_line=0.5, easting_projection_centre=6, northing_projection_centre=7, ) assert hop.name == "unknown" assert hop.method_name == "Hotine Oblique Mercator (variant B)" assert _to_dict(hop) == { "Latitude of projection centre": 1.0, "Longitude of projection centre": 2.0, ( "Azimuth at projection centre" if PROJ_GTE_95 else "Azimuth of initial line" ): 3.0, "Angle from Rectified to Skew Grid": 4.0, ( "Scale factor at projection centre" if PROJ_GTE_95 else "Scale factor on initial line" ): 0.5, "Easting at projection centre": 6.0, "Northing at projection centre": 7.0, } def test_hotline_oblique_mercator_b_operation__missing_azimuth(): with pytest.raises(ValueError): HotineObliqueMercatorBConversion( latitude_projection_centre=1, longitude_projection_centre=2, angle_from_rectified_to_skew_grid=4, ) def test_hotline_oblique_mercator_b_operation__duplicate_azimuth(): with pytest.raises(ValueError): HotineObliqueMercatorBConversion( latitude_projection_centre=1, longitude_projection_centre=2, angle_from_rectified_to_skew_grid=4, azimuth_initial_line=3, azimuth_projection_centre=3, ) def test_hotline_oblique_mercator_b_operation__duplicate_scale_factor(): with pytest.raises(ValueError): HotineObliqueMercatorBConversion( latitude_projection_centre=1, longitude_projection_centre=2, angle_from_rectified_to_skew_grid=4, azimuth_projection_centre=3, scale_factor_on_initial_line=0.5, scale_factor_projection_centre=0.5, ) def test_orthographic_operation__defaults(): aeop = OrthographicConversion() assert aeop.name == "unknown" assert aeop.method_name == "Orthographic" assert _to_dict(aeop) == { "Latitude of natural origin": 0.0, "Longitude of natural origin": 0.0, "False easting": 0.0, "False northing": 0.0, } def test_orthographic_operation(): aeop = OrthographicConversion( latitude_natural_origin=1, longitude_natural_origin=2, false_easting=3, false_northing=4, ) assert aeop.name == "unknown" assert aeop.method_name == "Orthographic" assert _to_dict(aeop) == { "Latitude of natural origin": 1.0, "Longitude of natural origin": 2.0, "False easting": 3.0, "False northing": 4.0, } def test_polar_stereographic_a_operation__defaults(): aeaop = PolarStereographicAConversion(90) assert aeaop.name == "unknown" assert aeaop.method_name == "Polar Stereographic (variant A)" assert _to_dict(aeaop) == { "Latitude of natural origin": 90.0, "Longitude of natural origin": 0.0, "False easting": 0.0, "False northing": 0.0, "Scale factor at natural origin": 1.0, } def test_polar_stereographic_a_operation(): aeaop = PolarStereographicAConversion( latitude_natural_origin=-90, longitude_natural_origin=2, false_easting=3, 
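        # Variant A is parameterised at the pole itself (the defaults test above
        # uses +90, this one -90) together with a scale factor at the natural
        # origin; variant B below instead takes a standard parallel and has no
        # scale-factor parameter, hence the different expected dicts.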
false_northing=4, scale_factor_natural_origin=0.5, ) assert aeaop.name == "unknown" assert aeaop.method_name == "Polar Stereographic (variant A)" assert _to_dict(aeaop) == { "Latitude of natural origin": -90.0, "Longitude of natural origin": 2.0, "False easting": 3.0, "False northing": 4.0, "Scale factor at natural origin": 0.5, } def test_polar_stereographic_b_operation__defaults(): aeop = PolarStereographicBConversion() assert aeop.name == "unknown" assert aeop.method_name == "Polar Stereographic (variant B)" assert _to_dict(aeop) == { "Latitude of standard parallel": 0.0, "Longitude of origin": 0.0, "False easting": 0.0, "False northing": 0.0, } def test_polar_stereographic_b_operation(): aeop = PolarStereographicBConversion( latitude_standard_parallel=1, longitude_origin=2, false_easting=3, false_northing=4, ) assert aeop.name == "unknown" assert aeop.method_name == "Polar Stereographic (variant B)" assert _to_dict(aeop) == { "Latitude of standard parallel": 1.0, "Longitude of origin": 2.0, "False easting": 3.0, "False northing": 4.0, } def test_sinusoidal_operation__defaults(): aeop = SinusoidalConversion() assert aeop.name == "unknown" assert aeop.method_name == "Sinusoidal" assert _to_dict(aeop) == { "Longitude of natural origin": 0.0, "False easting": 0.0, "False northing": 0.0, } def test_sinusoidal_operation(): aeop = SinusoidalConversion( longitude_natural_origin=2, false_easting=3, false_northing=4 ) assert aeop.name == "unknown" assert aeop.method_name == "Sinusoidal" assert _to_dict(aeop) == { "Longitude of natural origin": 2.0, "False easting": 3.0, "False northing": 4.0, } def test_stereographic_operation__defaults(): aeaop = StereographicConversion() assert aeaop.name == "unknown" assert aeaop.method_name == "Stereographic" assert _to_dict(aeaop) == { "Latitude of natural origin": 0.0, "Longitude of natural origin": 0.0, "False easting": 0.0, "False northing": 0.0, "Scale factor at natural origin": 1.0, } def test_stereographic_operation(): aeaop = StereographicConversion( latitude_natural_origin=1, longitude_natural_origin=2, false_easting=3, false_northing=4, scale_factor_natural_origin=0.5, ) assert aeaop.name == "unknown" assert aeaop.method_name == "Stereographic" assert _to_dict(aeaop) == { "Latitude of natural origin": 1.0, "Longitude of natural origin": 2.0, "False easting": 3.0, "False northing": 4.0, "Scale factor at natural origin": 0.5, } def test_utm_operation__defaults(): aeop = UTMConversion(zone=2) assert aeop.name == "UTM zone 2N" assert aeop.method_name == "Transverse Mercator" def test_utm_operation(): aeop = UTMConversion(zone=2, hemisphere="s") assert aeop.name == "UTM zone 2S" assert aeop.method_name == "Transverse Mercator" def test_transverse_mercator_operation__defaults(): aeaop = TransverseMercatorConversion() assert aeaop.name == "unknown" assert aeaop.method_name == "Transverse Mercator" assert _to_dict(aeaop) == { "Latitude of natural origin": 0.0, "Longitude of natural origin": 0.0, "False easting": 0.0, "False northing": 0.0, "Scale factor at natural origin": 1.0, } def test_transverse_mercator_operation(): aeaop = TransverseMercatorConversion( latitude_natural_origin=1, longitude_natural_origin=2, false_easting=3, false_northing=4, scale_factor_natural_origin=0.5, ) assert aeaop.name == "unknown" assert aeaop.method_name == "Transverse Mercator" assert _to_dict(aeaop) == { "Latitude of natural origin": 1.0, "Longitude of natural origin": 2.0, "False easting": 3.0, "False northing": 4.0, "Scale factor at natural origin": 0.5, } def 
test_vertical_perspective_operation__defaults(): aeaop = VerticalPerspectiveConversion(viewpoint_height=10) assert aeaop.name == "unknown" assert aeaop.method_name == "Vertical Perspective" assert _to_dict(aeaop) == { "Latitude of topocentric origin": 0.0, "Longitude of topocentric origin": 0.0, "Ellipsoidal height of topocentric origin": 0.0, "Viewpoint height": 10.0, "False easting": 0.0, "False northing": 0.0, } def test_vertical_perspective_operation(): aeaop = VerticalPerspectiveConversion( viewpoint_height=10, latitude_topocentric_origin=1, longitude_topocentric_origin=2, false_easting=3, false_northing=4, ellipsoidal_height_topocentric_origin=5, ) assert aeaop.name == "unknown" assert aeaop.method_name == "Vertical Perspective" assert _to_dict(aeaop) == { "Latitude of topocentric origin": 1.0, "Longitude of topocentric origin": 2.0, "Ellipsoidal height of topocentric origin": 5.0, "Viewpoint height": 10.0, "False easting": 3.0, "False northing": 4.0, } def test_rotated_latitude_longitude_operation__defaults(): aeaop = RotatedLatitudeLongitudeConversion(o_lat_p=1, o_lon_p=2) assert aeaop.name == "unknown" assert aeaop.method_name == "PROJ ob_tran o_proj=longlat" assert _to_dict(aeaop) == {"o_lat_p": 1.0, "o_lon_p": 2.0, "lon_0": 0.0} def test_rotated_latitude_longitude_operation(): aeaop = RotatedLatitudeLongitudeConversion(o_lat_p=1, o_lon_p=2, lon_0=3) assert aeaop.name == "unknown" assert aeaop.method_name == "PROJ ob_tran o_proj=longlat" assert _to_dict(aeaop) == {"o_lat_p": 1.0, "o_lon_p": 2.0, "lon_0": 3.0} def test_pole_rotation_netcdf_cf_convention__defaults(): poleop = PoleRotationNetCDFCFConversion( grid_north_pole_latitude=1, grid_north_pole_longitude=2 ) assert poleop.name == "Pole rotation (netCDF CF convention)" assert poleop.method_name == "Pole rotation (netCDF CF convention)" assert _to_dict(poleop) == { "Grid north pole latitude (netCDF CF convention)": 1.0, "Grid north pole longitude (netCDF CF convention)": 2.0, "North pole grid longitude (netCDF CF convention)": 0.0, } def test_pole_rotation_netcdf_cf_convention(): poleop = PoleRotationNetCDFCFConversion( grid_north_pole_latitude=1, grid_north_pole_longitude=2, north_pole_grid_longitude=10, ) assert poleop.name == "Pole rotation (netCDF CF convention)" assert poleop.method_name == "Pole rotation (netCDF CF convention)" assert _to_dict(poleop) == { "Grid north pole latitude (netCDF CF convention)": 1.0, "Grid north pole longitude (netCDF CF convention)": 2.0, "North pole grid longitude (netCDF CF convention)": 10.0, } def test_lambert_cylindrical_equal_area_scale_operation__defaults(): lceaop = LambertCylindricalEqualAreaScaleConversion() assert lceaop.name == "unknown" assert lceaop.method_name == "Lambert Cylindrical Equal Area" assert _to_dict(lceaop) == { "Latitude of 1st standard parallel": 0.0, "Longitude of natural origin": 0.0, "False easting": 0.0, "False northing": 0.0, } def test_lambert_cylindrical_equal_area_scale_operation(): lceaop = LambertCylindricalEqualAreaScaleConversion( longitude_natural_origin=2, false_easting=3, false_northing=4, scale_factor_natural_origin=0.999, ) assert lceaop.name == "unknown" assert lceaop.method_name == "Lambert Cylindrical Equal Area" op_dict = _to_dict(lceaop) assert_almost_equal( op_dict.pop("Latitude of 1st standard parallel"), 2.57, decimal=2 ) assert op_dict == { "Longitude of natural origin": 2.0, "False easting": 3.0, "False northing": 4.0, } @pytest.mark.parametrize( "eqc_class", [EquidistantCylindricalConversion, PlateCarreeConversion] ) def 
test_equidistant_cylindrical_conversion__defaults(eqc_class): eqc = eqc_class() assert eqc.name == "unknown" assert eqc.method_name == "Equidistant Cylindrical" assert _to_dict(eqc) == { "Latitude of 1st standard parallel": 0.0, "Latitude of natural origin": 0.0, "Longitude of natural origin": 0.0, "False easting": 0.0, "False northing": 0.0, } @pytest.mark.parametrize( "eqc_class", [EquidistantCylindricalConversion, PlateCarreeConversion] ) def test_equidistant_cylindrical_conversion(eqc_class): eqc = eqc_class( latitude_first_parallel=1.0, latitude_natural_origin=2.0, longitude_natural_origin=3.0, false_easting=4.0, false_northing=5.0, ) assert eqc.name == "unknown" assert eqc.method_name == "Equidistant Cylindrical" assert _to_dict(eqc) == { "Latitude of 1st standard parallel": 1.0, "Latitude of natural origin": 2.0, "Longitude of natural origin": 3.0, "False easting": 4.0, "False northing": 5.0, } def test_towgs84_transformation(): transformation = ToWGS84Transformation(GeographicCRS(), 1, 2, 3, 4, 5, 6, 7) assert transformation.towgs84 == [1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0] assert _to_dict(transformation) == { "Scale difference": 7.0, "X-axis rotation": 4.0, "X-axis translation": 1.0, "Y-axis rotation": 5.0, "Y-axis translation": 2.0, "Z-axis rotation": 6.0, "Z-axis translation": 3.0, } def test_towgs84_transformation__defaults(): transformation = ToWGS84Transformation(GeographicCRS()) assert transformation.towgs84 == [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] assert _to_dict(transformation) == { "Scale difference": 0.0, "X-axis rotation": 0.0, "X-axis translation": 0.0, "Y-axis rotation": 0.0, "Y-axis translation": 0.0, "Z-axis rotation": 0.0, "Z-axis translation": 0.0, } pyproj-3.7.1/test/crs/test_crs_coordinate_system.py000066400000000000000000000167501475425760300226540ustar00rootroot00000000000000import pytest from pyproj.crs.coordinate_system import ( Cartesian2DCS, Ellipsoidal2DCS, Ellipsoidal3DCS, VerticalCS, ) from pyproj.crs.enums import ( Cartesian2DCSAxis, Ellipsoidal2DCSAxis, Ellipsoidal3DCSAxis, VerticalCSAxis, ) @pytest.mark.parametrize( "axis, direction, unit_name", [ ("UP", "up", "metre"), (VerticalCSAxis.UP, "up", "metre"), (VerticalCSAxis.UP_US_FT, "up", "US survey foot"), ("UP_FT", "up", "foot"), (VerticalCSAxis.DEPTH, "down", "metre"), (VerticalCSAxis.DEPTH_US_FT, "down", "US survey foot"), ("DEPTH_FT", "down", "foot"), (VerticalCSAxis.GRAVITY_HEIGHT_US_FT, "up", "US survey foot"), ("GRAVITY_HEIGHT_FT", "up", "foot"), ], ) def test_vertical_cs(axis, direction, unit_name): vcs = VerticalCS(axis=axis) assert len(vcs.axis_list) == 1 assert vcs.axis_list[0].direction == direction assert vcs.axis_list[0].unit_name == unit_name @pytest.mark.parametrize( "axis, name_0, direction_0, name_1, direction_1, unit", [ ("EASTING_NORTHING", "Easting", "east", "Northing", "north", "metre"), ( Cartesian2DCSAxis.NORTHING_EASTING, "Northing", "north", "Easting", "east", "metre", ), ("EASTING_NORTHING_FT", "Easting", "east", "Northing", "north", "foot"), ( Cartesian2DCSAxis.NORTHING_EASTING_FT, "Northing", "north", "Easting", "east", "foot", ), ( "EASTING_NORTHING_US_FT", "Easting", "east", "Northing", "north", "US survey foot", ), ( Cartesian2DCSAxis.NORTHING_EASTING_US_FT, "Northing", "north", "Easting", "east", "US survey foot", ), ( "NORTH_POLE_EASTING_SOUTH_NORTHING_SOUTH", "Easting", "south", "Northing", "south", "metre", ), ( Cartesian2DCSAxis.SOUTH_POLE_EASTING_NORTH_NORTHING_NORTH, "Easting", "north", "Northing", "north", "metre", ), ("WESTING_SOUTHING", "Easting", "west", 
"Northing", "south", "metre"), ], ) def test_cartesian_2d_cs(axis, name_0, direction_0, name_1, direction_1, unit): vcs = Cartesian2DCS(axis=axis) assert len(vcs.axis_list) == 2 assert vcs.axis_list[0].direction == direction_0 assert vcs.axis_list[0].name == name_0 assert vcs.axis_list[0].unit_name == unit assert vcs.axis_list[1].direction == direction_1 assert vcs.axis_list[1].name == name_1 assert vcs.axis_list[1].unit_name == unit @pytest.mark.parametrize( "axis, name_0, direction_0, name_1, direction_1", [ ( Ellipsoidal2DCSAxis.LONGITUDE_LATITUDE, "Longitude", "east", "Latitude", "north", ), ( Ellipsoidal2DCSAxis.LATITUDE_LONGITUDE, "Latitude", "north", "Longitude", "east", ), ], ) def test_ellipsoidal_2d_cs(axis, name_0, direction_0, name_1, direction_1): vcs = Ellipsoidal2DCS(axis=axis) assert len(vcs.axis_list) == 2 assert vcs.axis_list[0].direction == direction_0 assert vcs.axis_list[0].name == name_0 assert vcs.axis_list[0].unit_name == "degree" assert vcs.axis_list[1].direction == direction_1 assert vcs.axis_list[1].name == name_1 assert vcs.axis_list[1].unit_name == "degree" @pytest.mark.parametrize( "axis, name_0, direction_0, name_1, direction_1", [ ( Ellipsoidal3DCSAxis.LONGITUDE_LATITUDE_HEIGHT, "Longitude", "east", "Latitude", "north", ), ( Ellipsoidal3DCSAxis.LATITUDE_LONGITUDE_HEIGHT, "Latitude", "north", "Longitude", "east", ), ], ) def test_ellipsoidal_3d_cs(axis, name_0, direction_0, name_1, direction_1): vcs = Ellipsoidal3DCS(axis=axis) assert len(vcs.axis_list) == 3 assert vcs.axis_list[0].direction == direction_0 assert vcs.axis_list[0].name == name_0 assert vcs.axis_list[0].unit_name == "degree" assert vcs.axis_list[1].direction == direction_1 assert vcs.axis_list[1].name == name_1 assert vcs.axis_list[1].unit_name == "degree" assert vcs.axis_list[2].direction == "up" assert vcs.axis_list[2].name == "Ellipsoidal height" assert vcs.axis_list[2].unit_name == "metre" def test_ellipsoidal2dcs_to_cf(): ecs = Ellipsoidal2DCS(axis=Ellipsoidal2DCSAxis.LATITUDE_LONGITUDE) assert ecs.to_cf() == [ { "standard_name": "latitude", "long_name": "latitude coordinate", "units": "degrees_north", "axis": "Y", }, { "standard_name": "longitude", "long_name": "longitude coordinate", "units": "degrees_east", "axis": "X", }, ] def test_ellipsoidal3dcs_to_cf(): ecs = Ellipsoidal3DCS(axis=Ellipsoidal3DCSAxis.LONGITUDE_LATITUDE_HEIGHT) assert ecs.to_cf() == [ { "standard_name": "longitude", "long_name": "longitude coordinate", "units": "degrees_east", "axis": "X", }, { "standard_name": "latitude", "long_name": "latitude coordinate", "units": "degrees_north", "axis": "Y", }, { "standard_name": "height_above_reference_ellipsoid", "long_name": "Ellipsoidal height", "units": "metre", "positive": "up", "axis": "Z", }, ] def test_cartesian2dcs_ft_to_cf(): csft = Cartesian2DCS(axis=Cartesian2DCSAxis.NORTHING_EASTING_FT) assert csft.to_cf() == [ { "axis": "Y", "long_name": "Northing", "standard_name": "projection_y_coordinate", "units": "0.3048 metre", }, { "axis": "X", "long_name": "Easting", "standard_name": "projection_x_coordinate", "units": "0.3048 metre", }, ] def test_cartesian2dcs_to_cf(): csm = Cartesian2DCS(axis=Cartesian2DCSAxis.EASTING_NORTHING_FT) assert csm.to_cf() == [ { "axis": "X", "long_name": "Easting", "standard_name": "projection_x_coordinate", "units": "0.3048 metre", }, { "axis": "Y", "long_name": "Northing", "standard_name": "projection_y_coordinate", "units": "0.3048 metre", }, ] def test_verticalcs_depth_to_cf(): vcs = VerticalCS(axis=VerticalCSAxis.DEPTH) assert 
vcs.to_cf() == [ { "standard_name": "height_above_reference_ellipsoid", "long_name": "Depth", "units": "metre", "positive": "down", "axis": "Z", } ] def test_verticalcs_height_to_cf(): vcs = VerticalCS(axis=VerticalCSAxis.GRAVITY_HEIGHT_US_FT) assert vcs.to_cf() == [ { "standard_name": "height_above_reference_ellipsoid", "long_name": "Gravity-related height", "units": "0.304800609601219 metre", "positive": "up", "axis": "Z", } ] pyproj-3.7.1/test/crs/test_crs_datum.py000066400000000000000000000041101475425760300202160ustar00rootroot00000000000000from numpy.testing import assert_almost_equal from pyproj.crs.datum import ( CustomDatum, CustomEllipsoid, CustomPrimeMeridian, Ellipsoid, PrimeMeridian, ) def test_custom_datum(): cd = CustomDatum() assert cd.ellipsoid.name == "WGS 84" assert cd.prime_meridian.name == "Greenwich" def test_custom_datum__input(): cd = CustomDatum( ellipsoid=Ellipsoid.from_epsg(7001), prime_meridian=PrimeMeridian.from_name("Lisbon"), ) assert cd.ellipsoid.name == "Airy 1830" assert cd.prime_meridian.name == "Lisbon" def test_custom_ellipsoid(): ce = CustomEllipsoid(semi_major_axis=6378137, inverse_flattening=298.257222101) assert ce.name == "undefined" assert ce.semi_major_metre == 6378137 assert ce.semi_minor_metre == 6356752.314140356 assert_almost_equal(ce.inverse_flattening, 298.257222101) assert sorted(ce.to_json_dict()) == [ "$schema", "inverse_flattening", "name", "semi_major_axis", "type", ] def test_custom_ellipsoid__minor(): ce = CustomEllipsoid( name="test", semi_major_axis=6378137, semi_minor_axis=6356752.314 ) assert ce.name == "test" assert ce.semi_major_metre == 6378137 assert ce.semi_minor_metre == 6356752.314 assert_almost_equal(ce.inverse_flattening, 298.25722014) assert sorted(ce.to_json_dict()) == [ "$schema", "name", "semi_major_axis", "semi_minor_axis", "type", ] def test_custom_ellipsoid__radius(): ce = CustomEllipsoid(radius=6378137) assert ce.name == "undefined" assert ce.semi_major_metre == 6378137 assert ce.semi_minor_metre == 6378137 assert ce.inverse_flattening == 0 assert sorted(ce.to_json_dict()) == ["$schema", "name", "radius", "type"] def test_custom_prime_meridian(): pm = CustomPrimeMeridian(longitude=2) assert pm.name == "undefined" assert pm.longitude == 2 def test_custom_prime_meridian__name(): pm = CustomPrimeMeridian(longitude=1, name="frank") assert pm.name == "frank" assert pm.longitude == 1 pyproj-3.7.1/test/crs/test_crs_json.py000066400000000000000000000105261475425760300200650ustar00rootroot00000000000000import pytest from pyproj.crs import ( CRS, CoordinateOperation, CoordinateSystem, Datum, Ellipsoid, PrimeMeridian, ) def test_crs_to_json_dict(): aeqd_crs = CRS(proj="aeqd", lon_0=-80, lat_0=40.5) json_dict = aeqd_crs.to_json_dict() assert json_dict["type"] == "ProjectedCRS" def test_crs_to_json(): aeqd_crs = CRS(proj="aeqd", lon_0=-80, lat_0=40.5) json_data = aeqd_crs.to_json() assert "ProjectedCRS" in json_data assert "\n" not in json_data def test_crs_to_json__pretty(): aeqd_crs = CRS(proj="aeqd", lon_0=-80, lat_0=40.5) json_data = aeqd_crs.to_json(pretty=True) assert "ProjectedCRS" in json_data assert json_data.startswith('{\n "') def test_crs_to_json__pretty__indenation(): aeqd_crs = CRS(proj="aeqd", lon_0=-80, lat_0=40.5) json_data = aeqd_crs.to_json(pretty=True, indentation=4) assert "ProjectedCRS" in json_data assert json_data.startswith('{\n "') def test_crs_from_json(): aeqd_crs = CRS(proj="aeqd", lon_0=-80, lat_0=40.5) assert CRS.from_json(aeqd_crs.to_json()) == aeqd_crs def test_crs_from_json_dict(): aeqd_crs = 
CRS(proj="aeqd", lon_0=-80, lat_0=40.5) assert CRS.from_json_dict(aeqd_crs.to_json_dict()) == aeqd_crs @pytest.mark.parametrize( "property_name, expected_type", [ ("coordinate_operation", "Conversion"), ("datum", "GeodeticReferenceFrame"), ("ellipsoid", "Ellipsoid"), ("prime_meridian", "PrimeMeridian"), ("coordinate_system", "CoordinateSystem"), ], ) def test_properties_to_json(property_name, expected_type): aeqd_crs = CRS(proj="aeqd", lon_0=-80, lat_0=40.5) json_data = getattr(aeqd_crs, property_name).to_json() assert expected_type in json_data assert "\n" not in json_data @pytest.mark.parametrize( "property_name, expected_type", [ ("coordinate_operation", "Conversion"), ("datum", "GeodeticReferenceFrame"), ("ellipsoid", "Ellipsoid"), ("prime_meridian", "PrimeMeridian"), ("coordinate_system", "CoordinateSystem"), ], ) def test_properties_to_json__pretty(property_name, expected_type): aeqd_crs = CRS(proj="aeqd", lon_0=-80, lat_0=40.5) json_data = getattr(aeqd_crs, property_name).to_json(pretty=True) assert expected_type in json_data assert json_data.startswith('{\n "') @pytest.mark.parametrize( "property_name, expected_type", [ ("coordinate_operation", "Conversion"), ("datum", "GeodeticReferenceFrame"), ("ellipsoid", "Ellipsoid"), ("prime_meridian", "PrimeMeridian"), ("coordinate_system", "CoordinateSystem"), ], ) def test_properties_to_json__pretty__indentation(property_name, expected_type): aeqd_crs = CRS(proj="aeqd", lon_0=-80, lat_0=40.5) json_data = getattr(aeqd_crs, property_name).to_json(pretty=True, indentation=4) assert expected_type in json_data assert json_data.startswith('{\n "') @pytest.mark.parametrize( "property_name, expected_type", [ ("coordinate_operation", "Conversion"), ("datum", "GeodeticReferenceFrame"), ("ellipsoid", "Ellipsoid"), ("prime_meridian", "PrimeMeridian"), ], ) def test_properties_to_json_dict(property_name, expected_type): aeqd_crs = CRS(proj="aeqd", lon_0=-80, lat_0=40.5) assert getattr(aeqd_crs, property_name).to_json_dict()["type"] == expected_type @pytest.mark.parametrize( "property_name, init_class", [ ("coordinate_operation", CoordinateOperation), ("datum", Datum), ("ellipsoid", Ellipsoid), ("prime_meridian", PrimeMeridian), ], ) def test_properties_from_json_dict(property_name, init_class): prop = getattr(CRS.from_epsg(26915), property_name) assert init_class.from_json_dict(prop.to_json_dict()) == prop def test_coordinate_system_from_json_dict(): # separate test from other properties due to # https://github.com/OSGeo/PROJ/issues/1818 aeqd_cs = CRS(proj="aeqd", lon_0=-80, lat_0=40.5).coordinate_system assert CoordinateSystem.from_json_dict(aeqd_cs.to_json_dict()) == aeqd_cs def test_coordinate_system_from_json(): # separate test from other properties due to # https://github.com/OSGeo/PROJ/issues/1818 aeqd_cs = CRS(proj="aeqd", lon_0=-80, lat_0=40.5).coordinate_system assert CoordinateSystem.from_json(aeqd_cs.to_json()) == aeqd_cs pyproj-3.7.1/test/crs/test_crs_maker.py000066400000000000000000000275301475425760300202160ustar00rootroot00000000000000import pytest from pyproj.crs import ( CRS, BoundCRS, CompoundCRS, CustomConstructorCRS, DerivedGeographicCRS, GeocentricCRS, GeographicCRS, ProjectedCRS, VerticalCRS, ) from pyproj.crs.coordinate_operation import ( AlbersEqualAreaConversion, LambertConformalConic2SPConversion, RotatedLatitudeLongitudeConversion, ToWGS84Transformation, TransverseMercatorConversion, UTMConversion, ) from pyproj.crs.coordinate_system import Cartesian2DCS, Ellipsoidal3DCS, VerticalCS from pyproj.crs.datum import CustomDatum 
from pyproj.crs.enums import VerticalCSAxis from pyproj.exceptions import CRSError from test.conftest import assert_can_pickle def assert_maker_inheritance_valid(new_crs, class_type): assert isinstance(new_crs, class_type) assert isinstance(new_crs.geodetic_crs, (type(None), CRS)) assert isinstance(new_crs.source_crs, (type(None), CRS)) assert isinstance(new_crs.target_crs, (type(None), CRS)) assert isinstance(new_crs.to_3d(), CRS) for sub_crs in new_crs.sub_crs_list: assert isinstance(sub_crs, CRS) def test_make_projected_crs(tmp_path): aeaop = AlbersEqualAreaConversion(0, 0) pc = ProjectedCRS(conversion=aeaop, name="Albers") assert pc.name == "Albers" assert pc.type_name == "Projected CRS" assert pc.coordinate_operation == aeaop assert_can_pickle(pc, tmp_path) def test_projected_crs__from_methods(): assert_maker_inheritance_valid(ProjectedCRS.from_epsg(6933), ProjectedCRS) assert_maker_inheritance_valid(ProjectedCRS.from_string("EPSG:6933"), ProjectedCRS) assert_maker_inheritance_valid( ProjectedCRS.from_proj4("+proj=aea +lat_1=1"), ProjectedCRS ) assert_maker_inheritance_valid( ProjectedCRS.from_user_input(ProjectedCRS.from_string("EPSG:6933")), ProjectedCRS, ) assert_maker_inheritance_valid( ProjectedCRS.from_json(CRS(6933).to_json()), ProjectedCRS ) assert_maker_inheritance_valid( ProjectedCRS.from_json_dict(CRS(6933).to_json_dict()), ProjectedCRS ) with pytest.raises(CRSError, match="Invalid type"): ProjectedCRS.from_epsg(4326) def test_make_geographic_crs(tmp_path): gc = GeographicCRS(name="WGS 84") assert gc.name == "WGS 84" assert gc.type_name == "Geographic 2D CRS" assert gc.to_authority() == ("OGC", "CRS84") assert_can_pickle(gc, tmp_path) def test_geographic_crs__from_methods(): assert_maker_inheritance_valid(GeographicCRS.from_epsg(4326), GeographicCRS) assert_maker_inheritance_valid( GeographicCRS.from_string("EPSG:4326"), GeographicCRS ) assert_maker_inheritance_valid( GeographicCRS.from_proj4("+proj=latlon"), GeographicCRS ) assert_maker_inheritance_valid( GeographicCRS.from_user_input(GeographicCRS.from_string("EPSG:4326")), GeographicCRS, ) assert_maker_inheritance_valid( GeographicCRS.from_json(CRS(4326).to_json()), GeographicCRS ) assert_maker_inheritance_valid( GeographicCRS.from_json_dict(CRS(4326).to_json_dict()), GeographicCRS ) with pytest.raises(CRSError, match="Invalid type"): GeographicCRS.from_epsg(6933) def test_make_geographic_3d_crs(): gcrs = GeographicCRS(ellipsoidal_cs=Ellipsoidal3DCS()) assert gcrs.type_name == "Geographic 3D CRS" assert gcrs.to_authority() == ("OGC", "CRS84h") def test_make_derived_geographic_crs(tmp_path): conversion = RotatedLatitudeLongitudeConversion(o_lat_p=0, o_lon_p=0) dgc = DerivedGeographicCRS(base_crs=GeographicCRS(), conversion=conversion) assert dgc.name == "undefined" assert dgc.type_name == "Derived Geographic 2D CRS" assert dgc.coordinate_operation == conversion assert dgc.is_derived assert_can_pickle(dgc, tmp_path) def test_derived_geographic_crs__from_methods(): crs_str = "+proj=ob_tran +o_proj=longlat +o_lat_p=0 +o_lon_p=0 +lon_0=0" with pytest.raises(CRSError, match="Invalid type Geographic 2D CRS"): DerivedGeographicCRS.from_epsg(4326) assert_maker_inheritance_valid( DerivedGeographicCRS.from_string(crs_str), DerivedGeographicCRS ) assert_maker_inheritance_valid( DerivedGeographicCRS.from_proj4(crs_str), DerivedGeographicCRS ) assert_maker_inheritance_valid( DerivedGeographicCRS.from_user_input(DerivedGeographicCRS.from_string(crs_str)), DerivedGeographicCRS, ) assert_maker_inheritance_valid( 
DerivedGeographicCRS.from_json(CRS(crs_str).to_json()), DerivedGeographicCRS ) assert_maker_inheritance_valid( DerivedGeographicCRS.from_json_dict(CRS(crs_str).to_json_dict()), DerivedGeographicCRS, ) def test_make_geocentric_crs(tmp_path): gc = GeocentricCRS(name="WGS 84") assert gc.name == "WGS 84" assert gc.is_geocentric assert gc.type_name == "Geocentric CRS" assert gc.to_authority() == ("EPSG", "4978") assert_can_pickle(gc, tmp_path) def test_geocentric_crs__from_methods(): assert_maker_inheritance_valid(GeocentricCRS.from_epsg(4978), GeocentricCRS) assert_maker_inheritance_valid( GeocentricCRS.from_string("EPSG:4978"), GeocentricCRS ) assert_maker_inheritance_valid( GeocentricCRS.from_proj4("+proj=geocent"), GeocentricCRS ) assert_maker_inheritance_valid( GeocentricCRS.from_user_input(GeocentricCRS.from_string("EPSG:4978")), GeocentricCRS, ) assert_maker_inheritance_valid( GeocentricCRS.from_json(CRS(4978).to_json()), GeocentricCRS ) assert_maker_inheritance_valid( GeocentricCRS.from_json_dict(CRS(4978).to_json_dict()), GeocentricCRS ) with pytest.raises(CRSError, match="Invalid type"): GeocentricCRS.from_epsg(6933) def test_vertical_crs(tmp_path): vc = VerticalCRS( name="NAVD88 height", datum="North American Vertical Datum 1988", geoid_model="GEOID12B", ) assert vc.name == "NAVD88 height" assert vc.type_name == "Vertical CRS" assert vc.coordinate_system == VerticalCS() assert vc.to_json_dict()["geoid_model"]["name"] == "GEOID12B" assert_can_pickle(vc, tmp_path) def test_vertical_crs__from_methods(): assert_maker_inheritance_valid(VerticalCRS.from_epsg(5703), VerticalCRS) assert_maker_inheritance_valid(VerticalCRS.from_string("EPSG:5703"), VerticalCRS) with pytest.raises(CRSError, match="Invalid type"): VerticalCRS.from_proj4("+proj=latlon") assert_maker_inheritance_valid( VerticalCRS.from_user_input(VerticalCRS.from_string("EPSG:5703")), VerticalCRS ) assert_maker_inheritance_valid( VerticalCRS.from_json(CRS(5703).to_json()), VerticalCRS ) assert_maker_inheritance_valid( VerticalCRS.from_json_dict(CRS(5703).to_json_dict()), VerticalCRS ) @pytest.mark.parametrize( "axis", [ VerticalCSAxis.UP, VerticalCSAxis.UP_FT, VerticalCSAxis.DEPTH, VerticalCSAxis.DEPTH_FT, VerticalCSAxis.GRAVITY_HEIGHT_FT, ], ) def test_vertical_crs__chance_cs_axis(axis): vc = VerticalCRS( name="NAVD88 height", datum="North American Vertical Datum 1988", vertical_cs=VerticalCS(axis=axis), ) assert vc.name == "NAVD88 height" assert vc.type_name == "Vertical CRS" assert vc.coordinate_system == VerticalCS(axis=axis) def test_compund_crs(tmp_path): vertcrs = VerticalCRS( name="NAVD88 height", datum="North American Vertical Datum 1988", vertical_cs=VerticalCS(), geoid_model="GEOID12B", ) projcrs = ProjectedCRS( name="NAD83 / Pennsylvania South", conversion=LambertConformalConic2SPConversion( latitude_false_origin=39.3333333333333, longitude_false_origin=-77.75, latitude_first_parallel=40.9666666666667, latitude_second_parallel=39.9333333333333, easting_false_origin=600000, northing_false_origin=0, ), geodetic_crs=GeographicCRS(datum="North American Datum 1983"), cartesian_cs=Cartesian2DCS(), ) compcrs = CompoundCRS( name="NAD83 / Pennsylvania South + NAVD88 height", components=[projcrs, vertcrs] ) assert compcrs.name == "NAD83 / Pennsylvania South + NAVD88 height" assert compcrs.type_name == "Compound CRS" assert compcrs.sub_crs_list[0].type_name == "Projected CRS" assert compcrs.sub_crs_list[1].type_name == "Vertical CRS" assert_can_pickle(compcrs, tmp_path) def test_compund_crs__from_methods(): crs = 
CompoundCRS.from_string("EPSG:4326+5773") with pytest.raises(CRSError, match="Invalid type"): CompoundCRS.from_epsg(4326) assert_maker_inheritance_valid(crs, CompoundCRS) with pytest.raises(CRSError, match="Invalid type"): CompoundCRS.from_proj4("+proj=longlat +datum=WGS84 +vunits=m") assert_maker_inheritance_valid(CompoundCRS.from_user_input(crs), CompoundCRS) assert_maker_inheritance_valid(CompoundCRS.from_json(crs.to_json()), CompoundCRS) assert_maker_inheritance_valid( CompoundCRS.from_json_dict(crs.to_json_dict()), CompoundCRS ) def test_bound_crs(tmp_path): proj_crs = ProjectedCRS(conversion=UTMConversion(12)) bound_crs = BoundCRS( source_crs=proj_crs, target_crs="WGS 84", transformation=ToWGS84Transformation( proj_crs.geodetic_crs, 1, 2, 3, 4, 5, 6, 7 ), ) assert bound_crs.type_name == "Bound CRS" assert bound_crs.source_crs.coordinate_operation.name == "UTM zone 12N" assert bound_crs.coordinate_operation.towgs84 == [1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0] assert bound_crs.target_crs.name == "WGS 84" assert_can_pickle(bound_crs, tmp_path) def test_bound_crs__example(): proj_crs = ProjectedCRS( conversion=TransverseMercatorConversion( latitude_natural_origin=0, longitude_natural_origin=15, false_easting=2520000, false_northing=0, scale_factor_natural_origin=0.9996, ), geodetic_crs=GeographicCRS( datum=CustomDatum(ellipsoid="International 1924 (Hayford 1909, 1910)") ), ) bound_crs = BoundCRS( source_crs=proj_crs, target_crs="WGS 84", transformation=ToWGS84Transformation( proj_crs.geodetic_crs, -122.74, -34.27, -22.83, -1.884, -3.4, -3.03, -15.62 ), ) with pytest.warns(UserWarning): assert bound_crs.to_dict() == { "ellps": "intl", "k": 0.9996, "lat_0": 0, "lon_0": 15, "no_defs": None, "proj": "tmerc", "towgs84": [-122.74, -34.27, -22.83, -1.884, -3.4, -3.03, -15.62], "type": "crs", "units": "m", "x_0": 2520000, "y_0": 0, } def test_bound_crs_crs__from_methods(): crs_str = "+proj=latlon +towgs84=0,0,0" with pytest.raises(CRSError, match="Invalid type"): BoundCRS.from_epsg(4326) assert_maker_inheritance_valid(BoundCRS.from_string(crs_str), BoundCRS) assert_maker_inheritance_valid(BoundCRS.from_proj4(crs_str), BoundCRS) assert_maker_inheritance_valid( BoundCRS.from_user_input(BoundCRS.from_string(crs_str)), BoundCRS ) assert_maker_inheritance_valid(BoundCRS.from_json(CRS(crs_str).to_json()), BoundCRS) assert_maker_inheritance_valid( BoundCRS.from_json_dict(CRS(crs_str).to_json_dict()), BoundCRS ) def test_custom_constructor_crs__not_implemented(): class MyCustomInit(CustomConstructorCRS): def __init__(self, *, name: str): super().__init__(name) with pytest.raises(NotImplementedError): MyCustomInit.from_epsg(4326) def test_custom_constructor_crs(): class MyCustomInit(CustomConstructorCRS): _expected_types = ("Geographic 2D CRS",) def __init__(self, *, name: str): super().__init__(name) assert isinstance(MyCustomInit.from_epsg(4326), MyCustomInit) pyproj-3.7.1/test/sample.out000066400000000000000000000071171475425760300160610ustar00rootroot00000000000000output of test.py ================= proj4 library version = 4.49 4 crnrs of AWIPS grid 221: -145.5 0.99999999999 -68.3182659527 0.896932791646 -2.5660073938 46.352013053 148.638612551 46.6347160116 from GRIB docs (see http://www.nco.ncep.noaa.gov/pmb/docs/on388/tableb.html) -145.5 1.0 -68.318 0.897 -2.566 46.352 148.639 46.635 data in lists: compute lats/lons for all points on AWIPS 221 grid (349x277) max/min lons -179.998717833 179.99098295 max/min lats 0.896932791646 85.3340424275 took 0.19 secs data in python arrays: compute lats/lons for all 
points on AWIPS 221 grid (349x277) max/min lons -179.998715866 179.990981756 max/min lats 0.896931758891 85.3340414374 took 0.14 secs data in a Numeric/numarray array: compute lats/lons for all points on AWIPS 221 grid (349x277) max/min lons -179.998717833 179.99098295 max/min lats 0.896932791646 85.3340424275 took 0.07 secs max abs error for x 6.78119249642e-09 max abs error for y 6.78119249642e-09 took 0.07 secs compare output with sample.out now run test2.py to test pickling output of test2.py ================== do it again, from pickled instance ... compute lats/lons for all points on AWIPS 221 grid (349x277) max/min lons in radians -3.14157024122 3.14143525553 max/min lats in radians 0.0156544123584 1.48935998712 took 0.08 secs max abs error for x 6.83576217853e-09 max abs error for y 9.31322574615e-09 took 0.06 secs output of test_transform.py =========================== proj4 library version = 4.49 (-4.9489738955799112e-06, 4.5528711254405607e-06) (should be close to zero) (-3.203692330089325e-06, 1.8227847107255002e-06) (should close to zero) max/min x and y for awips218 grid 0.0 7472825.54 0.0 5205377.66 max/min x and y for awips218 grid in awips221 coordinates 2224130.9457 10705426.2541 673840.639255 6706800.54439 error for reverse transformation back to awips218 coords (should be close to zero) -4.88944351673e-09 5.58793544769e-09 -9.72067937255e-09 1.04773789644e-08 output of datum_shift.py ======================== proj4 library version = 4.49 WGS84-->UTM Trieste, Molo Sartorio WGS84: 13.759554722 45.647188611 52.799999999999997 Trieste, Molo Sartorio UTM33 (from IGM): 403340.97 5055597.17 Trieste, Molo Sartorio UTM33 (converted): 403340.96723675972 5055597.1755538993 52.799999999999997 Difference (meters): -0.0027632402488961816 0.0055538993328809738 0.0 WGS84-->Gauss-Boaga Trieste, Molo Sartorio Gauss-Boaga (from IGM): 2423346.99 5055619.87 Trieste, Molo Sartorio Gauss-Boaga (converted): 2423346.9950080249 5055619.899023423 Difference (meters): 0.0050080246292054653 0.029023422859609127 UTM-->WGS84 Trieste, Molo Sartorio UTM33 (converted): 403340.96723675972 5055597.1755538993 52.799999999999997 Trieste, Molo Sartorio WGS84 (converted back): 13.759554722 45.647188611 52.799999999999997 Difference (seconds): -1.18305365504e-09 -1.20223830891e-09 0.0 (m) Gauss-Boaga-->WGS84 Trieste, Molo Sartorio Gauss-Boaga (converted): 2423346.9950080249 5055619.899023423 0.1668358146915066 Trieste, Molo Sartorio WGS84 (converted back): 13.7595547 45.6471886096 0.0012277923978516671 Difference (seconds): -7.91165149394e-05 -4.8976630751e-06 -52.798772207602148 (m) UTM (from IGM) --> WGS84 Trieste, Molo Sartorio UTM33 (from IGM): 403340.97 5055597.17 Trieste, Molo Sartorio WGS84 (converted): 13.7595547586 45.6471885614 52.799999999999997 Difference (seconds): 0.000131600918962 -0.000178537351303 0.0 (m) Gauss-Boaga (from IGM) --> WGS84 Trieste, Molo Sartorio Gauss-Boaga (from IGM): 2423346.99 5055619.87 Trieste, Molo Sartorio WGS84 (converted): 13.7595546415 45.6471883478 Difference (seconds): -0.000289702808942 -0.00094763802565 pyproj-3.7.1/test/test_aoi.py000066400000000000000000000022021475425760300162160ustar00rootroot00000000000000import pytest from pyproj.aoi import AreaOfInterest, BBox def test_backwards_compatible_import_paths(): from pyproj.transformer import ( # noqa: F401 pylint: disable=unused-import AreaOfInterest, ) def test_contains(): assert BBox(1, 1, 4, 4).contains(BBox(2, 2, 3, 3)) def test_not_contains(): assert not BBox(1, 1, 4, 4).contains(BBox(2, 2, 5, 5)) def 
test_intersects(): assert BBox(1, 1, 4, 4).intersects(BBox(2, 2, 5, 5)) def test_not_intersects(): assert not BBox(1, 1, 4, 4).intersects(BBox(10, 10, 20, 20)) @pytest.mark.parametrize("aoi_class", [AreaOfInterest, BBox]) @pytest.mark.parametrize( "input", [ (None, None, None, None), (float("nan"), float("nan"), float("nan"), float("nan")), (None, 0, 0, 0), (float("nan"), 0, 0, 0), (0, None, 0, 0), (0, float("nan"), 0, 0), (0, 0, None, 0), (0, 0, float("nan"), 0), (0, 0, 0, None), (0, 0, 0, float("nan")), ], ) def test_null_input(aoi_class, input): with pytest.raises(ValueError, match="NaN or None values are not allowed."): aoi_class(*input) pyproj-3.7.1/test/test_awips221.py000066400000000000000000000076631475425760300170340ustar00rootroot00000000000000import array import numpy from numpy.testing import assert_allclose from pyproj import Proj, __proj_version__ try: from time import perf_counter except ImportError: from time import clock as perf_counter def test_awips221(): params = {} params["proj"] = "lcc" params["R"] = 6371200 params["lat_1"] = 50 params["lat_2"] = 50 params["lon_0"] = -107 nx = 349 ny = 277 dx = 32463.41 dy = dx # can either use a dict # awips221 = Proj(params) # or keyword args awips221 = Proj(proj="lcc", R=6371200, lat_1=50, lat_2=50, lon_0=-107) print("proj4 library version = ", __proj_version__) # AWIPS grid 221 parameters # (from http://www.nco.ncep.noaa.gov/pmb/docs/on388/tableb.html) llcrnrx, llcrnry = awips221(-145.5, 1.0) params["x_0"] = -llcrnrx params["y_0"] = -llcrnry awips221 = Proj(params) llcrnrx, llcrnry = awips221(-145.5, 1.0) # find 4 lon/lat crnrs of AWIPS grid 221. llcrnrx = 0.0 llcrnry = 0.0 lrcrnrx = dx * (nx - 1) lrcrnry = 0.0 ulcrnrx = 0.0 ulcrnry = dy * (ny - 1) urcrnrx = dx * (nx - 1) urcrnry = dy * (ny - 1) llcrnrlon, llcrnrlat = awips221(llcrnrx, llcrnry, inverse=True) lrcrnrlon, lrcrnrlat = awips221(lrcrnrx, lrcrnry, inverse=True) urcrnrlon, urcrnrlat = awips221(urcrnrx, urcrnry, inverse=True) ulcrnrlon, ulcrnrlat = awips221(ulcrnrx, ulcrnry, inverse=True) print("4 crnrs of AWIPS grid 221:") print(llcrnrlon, llcrnrlat) print(lrcrnrlon, lrcrnrlat) print(urcrnrlon, urcrnrlat) print(ulcrnrlon, ulcrnrlat) print("from GRIB docs") print("(see http://www.nco.ncep.noaa.gov/pmb/docs/on388/tableb.html)") print(" -145.5 1.0") print(" -68.318 0.897") print(" -2.566 46.352") print(" 148.639 46.635") # compute lons and lats for the whole AWIPS grid 221 (349x277).
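# Illustrative sketch (not part of the original test): the lines that follow apply the same
# pattern -- inverse-projecting an entire grid of x/y coordinates in one vectorized call.
# In minimal standalone form, reusing this module's numpy/Proj imports, it looks like the
# snippet below; the _demo_* names are hypothetical and chosen to avoid clashing with the test.
_demo_proj = Proj(proj="lcc", R=6371200, lat_1=50, lat_2=50, lon_0=-107)
_demo_x, _demo_y = numpy.meshgrid(numpy.arange(5) * 32463.41, numpy.arange(4) * 32463.41)
# A Proj instance is callable; inverse=True converts projected x/y metres back to
# geographic lon/lat degrees, elementwise over the arrays.
_demo_lons, _demo_lats = _demo_proj(_demo_x, _demo_y, inverse=True)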
dx = (urcrnrx - llcrnrx) / (nx - 1) dy = (urcrnry - llcrnry) / (ny - 1) x = llcrnrx + dx * numpy.indices((ny, nx), "f")[1, :, :] y = llcrnry + dy * numpy.indices((ny, nx), "f")[0, :, :] t1 = perf_counter() lons, lats = awips221( numpy.ravel(x).tolist(), numpy.ravel(y).tolist(), inverse=True ) t2 = perf_counter() print("data in lists:") print(f"compute lats/lons for all points on AWIPS 221 grid ({nx}x{ny})") print("max/min lons") print(min(lons), max(lons)) print("max/min lats") print(min(lats), max(lats)) print("took", t2 - t1, "secs") xa = array.array("f", numpy.ravel(x).tolist()) ya = array.array("f", numpy.ravel(y).tolist()) t1 = perf_counter() lons, lats = awips221(xa, ya, inverse=True) t2 = perf_counter() print("data in python arrays:") print(f"compute lats/lons for all points on AWIPS 221 grid ({nx}x{ny})") print("max/min lons") print(min(lons), max(lons)) print("max/min lats") print(min(lats), max(lats)) print("took", t2 - t1, "secs") t1 = perf_counter() lons, lats = awips221(x, y, inverse=True) t2 = perf_counter() print("data in a numpy array:") print(f"compute lats/lons for all points on AWIPS 221 grid ({nx}x{ny})") print("max/min lons") print( numpy.minimum.reduce(numpy.ravel(lons)), numpy.maximum.reduce(numpy.ravel(lons)) ) print("max/min lats") print( numpy.minimum.reduce(numpy.ravel(lats)), numpy.maximum.reduce(numpy.ravel(lats)) ) print("took", t2 - t1, "secs") # reverse transformation. t1 = perf_counter() xx, yy = awips221(lons, lats) t2 = perf_counter() print("max abs error for x") max_abs_err_x = numpy.maximum.reduce(numpy.fabs(numpy.ravel(x - xx))) print(max_abs_err_x) assert_allclose(max_abs_err_x, 0, atol=1e-4) print("max abs error for y") max_abs_err_y = numpy.maximum.reduce(numpy.fabs(numpy.ravel(y - yy))) print(max_abs_err_y) assert_allclose(max_abs_err_y, 0, atol=1e-4) print("took", t2 - t1, "secs") print("compare output with sample.out") pyproj-3.7.1/test/test_cli.py000066400000000000000000000246331475425760300162310ustar00rootroot00000000000000import argparse import os import subprocess import sys from glob import glob from pathlib import Path from unittest.mock import patch import pytest from pyproj.__main__ import main from pyproj.datadir import append_data_dir, get_data_dir, get_user_data_dir from pyproj.sync import _load_grid_geojson from test.conftest import grids_available, proj_env, tmp_chdir PYPROJ_CLI_ENDPONTS = pytest.mark.parametrize( "input_command", [["pyproj"], [sys.executable, "-m", "pyproj"]] ) @pytest.mark.cli @PYPROJ_CLI_ENDPONTS def test_main(input_command, tmpdir): with tmp_chdir(str(tmpdir)): output = subprocess.check_output( input_command, stderr=subprocess.STDOUT ).decode("utf-8") assert "pyproj version:" in output assert "PROJ version:" in output assert "-v, --verbose Show verbose debugging version information." 
in output @pytest.mark.cli @PYPROJ_CLI_ENDPONTS @pytest.mark.parametrize("option", ["-v", "--verbose"]) def test_main__verbose(input_command, option, tmpdir): with tmp_chdir(str(tmpdir)): output = subprocess.check_output( input_command + [option], stderr=subprocess.STDOUT ).decode("utf-8") assert "pyproj:" in output assert "PROJ (compiled):" in output assert "PROJ (runtime):" in output assert "data dir" in output assert "user_data_dir" in output assert "System" in output assert "python" in output assert "Python deps" in output assert "-v, --verbose " not in output @pytest.mark.cli @PYPROJ_CLI_ENDPONTS @pytest.mark.parametrize("option", [["-h"], []]) def test_sync(input_command, option, tmpdir): with tmp_chdir(str(tmpdir)): output = subprocess.check_output( input_command + ["sync"] + option, stderr=subprocess.STDOUT ).decode("utf-8") assert ( "Tool for synchronizing PROJ datum and transformation support data." in output ) assert "--bbox" in output assert "--spatial-test" in output assert "--source-id" in output assert "--area-of-use" in output assert "--file" in output assert "--exclude-world-coverage" in output assert "--include-already-downloaded" in output assert "--list-files" in output assert "--system-directory" in output assert "--target-directory" in output assert "-v, --verbose" in output def _check_list_files_header(lines): assert lines[0].rstrip("\r") == "filename | source_id | area_of_use" assert lines[1].rstrip("\r") == "----------------------------------" @pytest.mark.cli @pytest.mark.network @PYPROJ_CLI_ENDPONTS def test_sync__source_id__list(input_command, tmpdir): with tmp_chdir(str(tmpdir)): output = subprocess.check_output( input_command + [ "sync", "--source-id", "fr_ign", "--list-files", "--include-already-downloaded", ], stderr=subprocess.STDOUT, ).decode("utf-8") lines = output.strip().split("\n") assert len(lines) > 2 _check_list_files_header(lines) for line in lines[2:]: assert "fr_ign" == line.split("|")[1].strip() @pytest.mark.cli @pytest.mark.network @PYPROJ_CLI_ENDPONTS def test_sync__area_of_use__list(input_command, tmpdir): with tmp_chdir(str(tmpdir)): output = subprocess.check_output( input_command + [ "sync", "--area-of-use", "France", "--list-files", "--include-already-downloaded", ], stderr=subprocess.STDOUT, ).decode("utf-8") lines = output.strip().split("\n") assert len(lines) > 2 _check_list_files_header(lines) for line in lines[2:]: assert "France" in line.split("|")[-1] @pytest.mark.cli @pytest.mark.network @PYPROJ_CLI_ENDPONTS def test_sync__file__list(input_command, tmpdir): with tmp_chdir(str(tmpdir)): output = subprocess.check_output( input_command + [ "sync", "--file", "ntf_r93", "--list-files", "--include-already-downloaded", ], stderr=subprocess.STDOUT, ).decode("utf-8") lines = output.strip().split("\n") assert len(lines) > 2 _check_list_files_header(lines) for line in lines[2:]: assert "ntf_r93" in line.split("|")[0] @pytest.mark.cli @pytest.mark.network @PYPROJ_CLI_ENDPONTS def test_sync__bbox__list(input_command, tmpdir): with tmp_chdir(str(tmpdir)): output = subprocess.check_output( input_command + [ "sync", "--bbox", "2,49,3,50", "--list-files", "--include-already-downloaded", ], stderr=subprocess.STDOUT, ).decode("utf-8") lines = output.strip().split("\n") assert len(lines) > 2 _check_list_files_header(lines) assert " | be_ign | " in output assert " | us_nga | " in output assert " | fr_ign | " in output @pytest.mark.cli @pytest.mark.network @PYPROJ_CLI_ENDPONTS def test_sync__bbox__list__exclude_world_coverage(input_command, tmpdir): 
with tmp_chdir(str(tmpdir)): output = subprocess.check_output( input_command + [ "sync", "--bbox", "2,49,3,50", "--exclude-world-coverage", "--list-files", "--include-already-downloaded", ], stderr=subprocess.STDOUT, ).decode("utf-8") lines = output.strip().split("\n") assert len(lines) > 2 _check_list_files_header(lines) assert " | be_ign | " in output assert " | us_nga | " not in output assert " | fr_ign | " in output @pytest.mark.cli @PYPROJ_CLI_ENDPONTS @pytest.mark.parametrize( "extra_arg", [ "--list-files", "--source-id", "--area-of-use", "--bbox", "--list-files", "--file", ], ) def test_sync__all__exclusive_error(input_command, extra_arg, tmpdir): with tmp_chdir(str(tmpdir)), pytest.raises(subprocess.CalledProcessError): subprocess.check_output( input_command + ["sync", "--all", extra_arg], stderr=subprocess.STDOUT ) @pytest.mark.network @patch( "pyproj.__main__.parser.parse_args", return_value=argparse.Namespace( bbox=None, list_files=False, file="ntf_r93", all=False, source_id=None, area_of_use=None, verbose=False, target_directory=None, system_directory=False, spatial_test="intersects", exclude_world_coverage=False, include_already_downloaded=True, ), ) @patch("pyproj.__main__._download_resource_file") def test_sync_download(download_mock, parse_args_mock): main() download_mock.assert_called_with( directory=get_user_data_dir(), file_url="https://cdn.proj.org/fr_ign_ntf_r93.tif", sha256="0aa738b3e00fd2d64f8e3cd0e76034d4792374624fa0e133922433c9491bbf2a", short_name="fr_ign_ntf_r93.tif", verbose=False, ) @pytest.mark.network @patch( "pyproj.__main__.parser.parse_args", return_value=argparse.Namespace( bbox=None, list_files=False, file="ntf_r93", all=False, source_id=None, area_of_use=None, verbose=True, target_directory="test_directory", system_directory=False, spatial_test="intersects", exclude_world_coverage=False, include_already_downloaded=True, ), ) @patch("pyproj.__main__._download_resource_file") @patch("pyproj.sync._load_grid_geojson") def test_sync_download__directory( load_grid_geojson_mock, download_mock, parse_args_mock ): load_grid_geojson_mock.return_value = _load_grid_geojson() main() download_mock.assert_called_with( directory="test_directory", file_url="https://cdn.proj.org/fr_ign_ntf_r93.tif", sha256="0aa738b3e00fd2d64f8e3cd0e76034d4792374624fa0e133922433c9491bbf2a", short_name="fr_ign_ntf_r93.tif", verbose=True, ) load_grid_geojson_mock.assert_called_with(target_directory="test_directory") @pytest.mark.network @patch( "pyproj.__main__.parser.parse_args", return_value=argparse.Namespace( bbox=None, list_files=False, file="ntf_r93", all=False, source_id=None, area_of_use=None, verbose=True, target_directory=None, system_directory=True, spatial_test="intersects", exclude_world_coverage=False, include_already_downloaded=True, ), ) @patch("pyproj.__main__._download_resource_file") @patch("pyproj.sync._load_grid_geojson") def test_sync_download__system_directory( load_grid_geojson_mock, download_mock, parse_args_mock ): load_grid_geojson_mock.return_value = _load_grid_geojson() main() datadir = get_data_dir().split(os.path.sep)[0] download_mock.assert_called_with( directory=datadir, file_url="https://cdn.proj.org/fr_ign_ntf_r93.tif", sha256="0aa738b3e00fd2d64f8e3cd0e76034d4792374624fa0e133922433c9491bbf2a", short_name="fr_ign_ntf_r93.tif", verbose=True, ) load_grid_geojson_mock.assert_called_with(target_directory=datadir) @pytest.mark.network @patch("pyproj.__main__.parser.parse_args") def test_sync__download_grids(parse_args_mock, tmp_path, capsys): 
parse_args_mock.return_value = argparse.Namespace( bbox=None, list_files=False, file="us_noaa_alaska", all=False, source_id=None, area_of_use=None, verbose=True, target_directory=str(tmp_path), system_directory=False, spatial_test="intersects", exclude_world_coverage=False, include_already_downloaded=False, ) main() captured = capsys.readouterr() paths = sorted(Path(path).name for path in glob(str(tmp_path.joinpath("*")))) if grids_available("us_noaa_alaska.tif", check_network=False): assert paths == ["files.geojson"] assert captured.out == "" else: assert paths == ["files.geojson", "us_noaa_alaska.tif"] assert captured.out == "Downloading: https://cdn.proj.org/us_noaa_alaska.tif\n" # make sure not downloaded again with proj_env(): append_data_dir(str(tmp_path)) main() captured = capsys.readouterr() assert captured.out == "" pyproj-3.7.1/test/test_database.py000066400000000000000000000176021475425760300172240ustar00rootroot00000000000000import pytest from pyproj.aoi import AreaOfInterest, BBox from pyproj.database import ( Unit, get_authorities, get_codes, get_database_metadata, get_units_map, query_crs_info, query_utm_crs_info, ) from pyproj.enums import PJType def test_backwards_compatible_import_paths(): from pyproj import ( # noqa: F401 pylint: disable=unused-import get_authorities, get_codes, get_units_map, ) def test_units_map__default(): units_map = get_units_map() assert isinstance(units_map["metre"], Unit) assert units_map["metre"].name == "metre" assert units_map["metre"].auth_name == "EPSG" assert units_map["metre"].code == "9001" assert units_map["metre"].category == "linear" assert units_map["metre"].conv_factor == 1 assert units_map["metre"].proj_short_name == "m" assert not units_map["metre"].deprecated any_deprecated = False for item in units_map.values(): any_deprecated = any_deprecated or item.deprecated assert not any_deprecated @pytest.mark.parametrize( "category", [ "linear", "linear_per_time", "angular", "angular_per_time", "scale", "scale_per_time", "time", ], ) def test_units_map__category(category): units_map = get_units_map(category=category) assert len(units_map) > 1 for item in units_map.values(): assert item.category == category @pytest.mark.parametrize("auth_name", ["EPSG", "PROJ"]) def test_units_map__auth_name(auth_name): units_map = get_units_map(auth_name=auth_name) assert len(units_map) > 1 for item in units_map.values(): assert item.auth_name == auth_name @pytest.mark.parametrize("deprecated", ["zzz", "True", True]) def test_units_map__deprecated(deprecated): units_map = get_units_map(allow_deprecated=deprecated) assert len(units_map) > 1 any_deprecated = False for item in units_map.values(): any_deprecated = any_deprecated or item.deprecated assert any_deprecated @pytest.mark.parametrize("auth_name, category", [(None, 1), (1, None)]) def test_units_map__invalid(auth_name, category): with pytest.raises(TypeError): get_units_map(auth_name=auth_name, category=category) def test_get_authorities(): assert "EPSG" in get_authorities() @pytest.mark.parametrize( "auth, pj_type, deprecated", [ ("IGNF", PJType.ELLIPSOID, False), ("EPSG", PJType.CRS, False), ("EPSG", PJType.CRS, True), ("PROJ", PJType.ELLIPSOID, False), ("IGNF", "ELLIPSOID", False), ("EPSG", "CRS", False), ("EPSG", "crs", True), ("PROJ", "ellipsoid", False), ], ) def test_get_codes(auth, pj_type, deprecated): assert get_codes(auth, pj_type, deprecated) @pytest.mark.parametrize( "auth, pj_type", [("blob", "BOUND_CRS"), ("PROJ", PJType.BOUND_CRS), ("ITRF", PJType.BOUND_CRS)], ) def 
test_get_codes__empty(auth, pj_type): assert not get_codes(auth, pj_type) def test_get_codes__derived_projected_crs(): assert not get_codes("EPSG", PJType.DERIVED_PROJECTED_CRS) def test_get_codes__invalid_auth(): with pytest.raises(TypeError): get_codes(123, PJType.BOUND_CRS) def test_get_codes__invalid_code(): with pytest.raises(ValueError): get_codes("ITRF", "frank") @pytest.mark.parametrize( "auth, pj_type, deprecated", [ (None, None, False), ("EPSG", PJType.PROJECTED_CRS, False), ("EPSG", PJType.PROJECTED_CRS, True), ("IGNF", [PJType.GEOGRAPHIC_3D_CRS, PJType.GEOGRAPHIC_2D_CRS], False), ("EPSG", "PROJECTED_CRS", False), ("EPSG", "Projected_Crs", True), ], ) def test_query_crs_info(auth, pj_type, deprecated): crs_info_list = query_crs_info(auth, pj_type, allow_deprecated=deprecated) assert crs_info_list any_deprecated = any(crs_info.deprecated for crs_info in crs_info_list) if deprecated: assert any_deprecated else: assert not any_deprecated def test_query_crs_info__derived_projected_crs(): assert not query_crs_info(pj_types=PJType.DERIVED_PROJECTED_CRS) @pytest.mark.parametrize( "auth, pj_type", [ ("blob", "BOUND_CRS"), ("IGNF", PJType.ELLIPSOID), ("PROJ", PJType.BOUND_CRS), ("ITRF", PJType.BOUND_CRS), ], ) def test_query_crs_info__empty(auth, pj_type): assert not query_crs_info(auth, pj_type) def test_query_crs_info__invalid_auth(): with pytest.raises(TypeError): query_crs_info(123, PJType.BOUND_CRS) def test_query_crs_info__invalid_code(): with pytest.raises(ValueError): query_crs_info("ITRF", "frank") def test_query_crs_info__aoi(): aoi = BBox(west=-40, south=50, east=-20, north=70) crs_info_list = query_crs_info( auth_name="ESRI", pj_types=PJType.PROJECTED_CRS, area_of_interest=AreaOfInterest( west_lon_degree=aoi.west, south_lat_degree=aoi.south, east_lon_degree=aoi.east, north_lat_degree=aoi.north, ), ) assert crs_info_list not_contains_present = False for crs_info in crs_info_list: bbox = BBox(*crs_info.area_of_use.bounds) assert bbox.intersects(aoi) assert crs_info.auth_name == "ESRI" assert crs_info.type == PJType.PROJECTED_CRS assert not crs_info.deprecated if not bbox.contains(aoi): not_contains_present = True assert not_contains_present def test_query_crs_info__aoi_contains(): aoi = BBox(west=-40, south=50, east=-20, north=70) crs_info_list = query_crs_info( auth_name="IGNF", pj_types=[PJType.PROJECTED_CRS], area_of_interest=AreaOfInterest( west_lon_degree=aoi.west, south_lat_degree=aoi.south, east_lon_degree=aoi.east, north_lat_degree=aoi.north, ), contains=True, ) assert crs_info_list for crs_info in crs_info_list: assert BBox(*crs_info.area_of_use.bounds).contains(aoi) assert crs_info.auth_name == "IGNF" assert crs_info.type == PJType.PROJECTED_CRS assert not crs_info.deprecated @pytest.mark.parametrize("datum_name", ["WGS 84", "WGS84", "NAD27", "NAD83"]) def test_query_utm_crs_info__aoi_datum_name(datum_name): aoi = BBox(west=-93.581543, south=42.032974, east=-93.581543, north=42.032974) crs_info_list = query_utm_crs_info( datum_name=datum_name, area_of_interest=AreaOfInterest( west_lon_degree=aoi.west, south_lat_degree=aoi.south, east_lon_degree=aoi.east, north_lat_degree=aoi.north, ), ) assert len(crs_info_list) == 1 crs_info = crs_info_list[0] bbox = BBox(*crs_info.area_of_use.bounds) assert bbox.intersects(aoi) assert "UTM zone" in crs_info.name assert datum_name.replace(" ", "") in crs_info.name.replace(" ", "") assert crs_info.auth_name == "EPSG" assert crs_info.type == PJType.PROJECTED_CRS assert not crs_info.deprecated def 
test_query_utm_crs_info__aoi_contains(): aoi = BBox(west=41, south=50, east=42, north=51) crs_info_list = query_utm_crs_info( area_of_interest=AreaOfInterest( west_lon_degree=aoi.west, south_lat_degree=aoi.south, east_lon_degree=aoi.east, north_lat_degree=aoi.north, ), contains=True, ) assert crs_info_list for crs_info in crs_info_list: assert BBox(*crs_info.area_of_use.bounds).contains(aoi) assert "UTM zone" in crs_info.name assert crs_info.auth_name == "EPSG" assert crs_info.type == PJType.PROJECTED_CRS assert not crs_info.deprecated def test_get_database_metadata(): epsg_version = get_database_metadata("EPSG.VERSION") assert epsg_version assert isinstance(epsg_version, str) def test_get_database_metadata__invalid(): assert get_database_metadata("doesnotexist") is None pyproj-3.7.1/test/test_datadir.py000066400000000000000000000210431475425760300170620ustar00rootroot00000000000000import logging import os from contextlib import contextmanager from pathlib import Path from unittest.mock import patch import pytest from pyproj import CRS, Transformer, get_codes, set_use_global_context from pyproj.datadir import ( DataDirError, append_data_dir, get_data_dir, get_user_data_dir, set_data_dir, ) from pyproj.enums import PJType from pyproj.exceptions import CRSError from test.conftest import proj_env @contextmanager def proj_logging_env(): """ Ensure handler is added and then removed at end. """ console_handler = logging.StreamHandler() formatter = logging.Formatter("%(threadName)s:%(levelname)s:%(message)s") console_handler.setFormatter(formatter) logger = logging.getLogger("pyproj") logger.addHandler(console_handler) logger.setLevel(logging.DEBUG) try: yield finally: logger.removeHandler(console_handler) def create_projdb(tmpdir): Path(tmpdir, "proj.db").write_text("DUMMY proj.db") _INVALID_PATH = Path("/invalid/path/to/nowhere") def test_get_data_dir__missing(): with ( proj_env(), pytest.raises(DataDirError), patch.dict(os.environ, {}, clear=True), patch("pyproj.datadir.Path.absolute", return_value=_INVALID_PATH), patch("pyproj.datadir.shutil.which", return_value=None), patch("pyproj.datadir.Path.absolute", return_value=_INVALID_PATH), patch("pyproj.datadir.sys.prefix", str(_INVALID_PATH)), ): assert get_data_dir() is None @pytest.mark.parametrize("projdir_type", [str, Path]) def test_get_data_dir__from_user(projdir_type, tmp_path): tmpdir = tmp_path / "proj" tmpdir.mkdir() tmpdir_env = tmp_path / "proj_env" tmpdir_env.mkdir() with ( proj_env(), patch.dict(os.environ, {"PROJ_DATA": str(tmpdir_env)}, clear=True), patch("pyproj.datadir.Path.absolute", return_value=tmpdir / "datadir.py"), patch("pyproj.datadir.sys.prefix", str(tmpdir_env)), ): # noqa: E501 create_projdb(tmpdir) create_projdb(tmpdir_env) set_data_dir(projdir_type(tmpdir)) internal_proj_dir = tmpdir / "proj_dir" / "share" / "proj" internal_proj_dir.mkdir(parents=True) create_projdb(internal_proj_dir) assert get_data_dir() == str(tmpdir) def test_get_data_dir__internal(tmp_path): tmpdir = tmp_path / "proj" tmpdir.mkdir() tmpdir_fake = tmp_path / "proj_fake" tmpdir_fake.mkdir() with ( proj_env(), patch.dict( os.environ, {"PROJ_LIB": str(tmpdir_fake), "PROJ_DATA": str(tmpdir_fake)}, clear=True, ), patch("pyproj.datadir.Path.absolute", return_value=tmpdir / "datadir.py"), patch("pyproj.datadir.sys.prefix", str(tmpdir_fake)), ): create_projdb(tmpdir) create_projdb(tmpdir_fake) internal_proj_dir = tmpdir / "proj_dir" / "share" / "proj" internal_proj_dir.mkdir(parents=True) create_projdb(internal_proj_dir) assert get_data_dir() == 
str(internal_proj_dir) def test_get_data_dir__from_env_var__proj_lib(tmp_path): with ( proj_env(), patch.dict(os.environ, {"PROJ_LIB": str(tmp_path)}, clear=True), patch("pyproj.datadir.Path.absolute", return_value=_INVALID_PATH), patch("pyproj.datadir.sys.prefix", str(_INVALID_PATH)), ): create_projdb(tmp_path) assert get_data_dir() == str(tmp_path) def test_get_data_dir__from_env_var__proj_data(tmp_path): with ( proj_env(), patch.dict(os.environ, {"PROJ_DATA": str(tmp_path)}, clear=True), patch("pyproj.datadir.Path.absolute", return_value=_INVALID_PATH), patch("pyproj.datadir.sys.prefix", str(_INVALID_PATH)), ): create_projdb(tmp_path) assert get_data_dir() == str(tmp_path) def test_get_data_dir__from_env_var__multiple(tmp_path): tmpdir = os.pathsep.join([str(tmp_path) for _ in range(3)]) with ( proj_env(), patch.dict(os.environ, {"PROJ_DATA": tmpdir}, clear=True), patch("pyproj.datadir.Path.absolute", return_value=_INVALID_PATH), patch("pyproj.datadir.sys.prefix", str(_INVALID_PATH)), ): create_projdb(tmp_path) assert get_data_dir() == tmpdir def test_get_data_dir__from_prefix(tmp_path): with ( proj_env(), patch.dict(os.environ, {}, clear=True), patch("pyproj.datadir.Path.absolute", return_value=_INVALID_PATH), patch("pyproj.datadir.sys.prefix", str(tmp_path)), ): proj_dir = tmp_path / "share" / "proj" proj_dir.mkdir(parents=True) create_projdb(proj_dir) assert get_data_dir() == str(proj_dir) def test_get_data_dir__from_prefix__conda_windows(tmp_path): with ( proj_env(), patch.dict(os.environ, {}, clear=True), patch("pyproj.datadir.Path.absolute", return_value=_INVALID_PATH), patch("pyproj.datadir.sys.prefix", str(tmp_path)), ): proj_dir = tmp_path / "Library" / "share" / "proj" proj_dir.mkdir(parents=True) create_projdb(proj_dir) assert get_data_dir() == str(proj_dir) def test_get_data_dir__from_path(tmp_path): with ( proj_env(), patch.dict(os.environ, {}, clear=True), patch("pyproj.datadir.Path.absolute", return_value=_INVALID_PATH), patch("pyproj.datadir.sys.prefix", str(_INVALID_PATH)), patch( "pyproj.datadir.shutil.which", return_value=str(tmp_path / "bin" / "proj") ), ): proj_dir = tmp_path / "share" / "proj" proj_dir.mkdir(parents=True) create_projdb(proj_dir) assert get_data_dir() == str(proj_dir) @pytest.mark.parametrize("projdir_type", [str, Path]) def test_append_data_dir__internal(projdir_type, tmp_path): with ( proj_env(), patch.dict(os.environ, {}, clear=True), patch("pyproj.datadir.Path.absolute", return_value=tmp_path / "datadir.py"), patch("pyproj.datadir.sys.prefix", str(_INVALID_PATH)), ): create_projdb(tmp_path) internal_proj_dir = tmp_path / "proj_dir" / "share" / "proj" internal_proj_dir.mkdir(parents=True) create_projdb(internal_proj_dir) extra_datadir = tmp_path / "extra_datumgrids" append_data_dir(projdir_type(extra_datadir)) assert get_data_dir() == os.pathsep.join( [str(internal_proj_dir), str(extra_datadir)] ) @pytest.mark.slow def test_creating_multiple_crs_without_file_limit(): """ This test checks for two things: 1. Ensure database connection is closed for file limit https://github.com/pyproj4/pyproj/issues/374 2. 
Ensure core-dumping does not occur when many objects are created https://github.com/pyproj4/pyproj/issues/678 """ codes = get_codes("EPSG", PJType.PROJECTED_CRS, False) assert [CRS.from_epsg(code) for code in codes] def test_get_user_data_dir(): assert get_user_data_dir().endswith("proj") @patch.dict("os.environ", {"PYPROJ_GLOBAL_CONTEXT": "ON"}, clear=True) def test_set_use_global_context__default_on(): with pytest.warns(FutureWarning): set_use_global_context() @patch.dict("os.environ", {"PYPROJ_GLOBAL_CONTEXT": "OFF"}, clear=True) def test_set_use_global_context__on(): with pytest.warns(FutureWarning): set_use_global_context(True) def test_proj_debug_logging(capsys): with proj_logging_env(): with pytest.warns(FutureWarning): transformer = Transformer.from_proj("+init=epsg:4326", "+init=epsg:27700") transformer.transform(100000, 100000) captured = capsys.readouterr() if os.environ.get("PROJ_DEBUG") == "3": assert "PROJ_TRACE" in captured.err assert "PROJ_DEBUG" in captured.err elif os.environ.get("PROJ_DEBUG") == "2": assert "PROJ_TRACE" not in captured.err assert "PROJ_DEBUG" in captured.err else: assert "PROJ_ERROR" in captured.err def test_proj_debug_logging__error(capsys): with proj_logging_env(), pytest.raises(CRSError): CRS("INVALID STRING") captured = capsys.readouterr() if os.environ.get("PROJ_DEBUG") == "3": assert "PROJ_TRACE" in captured.err assert "PROJ_DEBUG" in captured.err assert "PROJ_ERROR" in captured.err elif os.environ.get("PROJ_DEBUG") == "2": assert "PROJ_TRACE" not in captured.err assert "PROJ_DEBUG" in captured.err assert "PROJ_ERROR" in captured.err else: assert captured.err == "" assert captured.out == "" pyproj-3.7.1/test/test_datum.py000066400000000000000000000015271475425760300165710ustar00rootroot00000000000000import pytest from numpy.testing import assert_almost_equal from pyproj import CRS, Proj, transform from test.conftest import grids_available @pytest.mark.grid @pytest.mark.parametrize("proj_class", [Proj, CRS]) def test_datum(proj_class): p1 = proj_class(proj="latlong", datum="WGS84") s_1 = -111.5 s_2 = 45.25919444444 p2 = proj_class(proj="utm", zone=10, datum="NAD27") with pytest.warns(FutureWarning): x2, y2 = transform(p1, p2, s_1, s_2) if grids_available("us_noaa_emhpgn.tif"): assert_almost_equal((x2, y2), (1402286.33, 5076292.30), decimal=2) elif grids_available("us_noaa_conus.tif"): assert_almost_equal((x2, y2), (1402285.98, 5076292.42), decimal=2) else: # https://github.com/OSGeo/PROJ/issues/1808 assert_almost_equal((x2, y2), (1402288.54, 5076296.64), decimal=2) pyproj-3.7.1/test/test_datum_shift.py000066400000000000000000000043241475425760300177640ustar00rootroot00000000000000import pytest from numpy.testing import assert_almost_equal from pyproj import Proj, transform # illustrates the use of the transform function to # perform coordinate transformations with datum shifts. 
# # This example is from Roberto Vidmar # # Test point is Trieste, Molo Sartorio # # This data come from the Istituto Geografico Militare (IGM), as well as # the 7 parameters to transform from Gauss-Boaga (our reference frame) # to WGS84 # # WGS84 Lat: 45d38'49.879" (45.647188611) # WGS84 Lon: 13d45'34.397" (13.759554722) # WGS84 z: 52.80; # UTM 33: 403340.97 5055597.17 # GB: 2423346.99 5055619.87 UTM_x = 403340.9672367854 UTM_y = 5055597.175553089 GB_x = 2423346.99 GB_y = 5055619.87 WGS84_lat = 45.647188611 # Degrees WGS84_lon = 13.759554722 # Degrees UTM_z = WGS84_z = 52.8 # Ellipsoidical height in meters WGS84_PROJ = Proj(proj="latlong", datum="WGS84") UTM_33_PROJ = Proj(proj="utm", zone="33") with pytest.warns(FutureWarning): GAUSSSB_PROJ = Proj( init="epsg:3004", towgs84="-122.74,-34.27,-22.83,-1.884,-3.400,-3.030,-15.62" ) def test_shift_wgs84_to_utm33(): with pytest.warns(FutureWarning): xutm33, yutm33, zutm33 = transform( WGS84_PROJ, UTM_33_PROJ, WGS84_lon, WGS84_lat, WGS84_z ) assert_almost_equal((xutm33, yutm33, zutm33), (UTM_x, UTM_y, UTM_z)) def test_shift_utm33_to_wgs84(): with pytest.warns(FutureWarning): back_lon, back_lat, back_z = transform( UTM_33_PROJ, WGS84_PROJ, UTM_x, UTM_y, UTM_z ) assert_almost_equal((back_lon, back_lat, back_z), (WGS84_lon, WGS84_lat, WGS84_z)) def test_shift_wgs84_to_gaussb_no_ellipsoidal_height(): with pytest.warns(FutureWarning): xgb, ygb, zgb = transform(WGS84_PROJ, GAUSSSB_PROJ, WGS84_lon, WGS84_lat, 0) assert_almost_equal((xgb, ygb, zgb), (GB_x, 5055619.899, 0), decimal=2) def test_shift_gaussb_to_wgs84_no_ellipsoidal_height(): with pytest.warns(FutureWarning): back_lon, back_lat, back_z = transform(GAUSSSB_PROJ, WGS84_PROJ, GB_x, GB_y, 0) assert_almost_equal( (back_lon, back_lat, back_z), (WGS84_lon, WGS84_lat, 0), decimal=3 ) pyproj-3.7.1/test/test_doctest_wrapper.py000066400000000000000000000031061475425760300206570ustar00rootroot00000000000000""" This is a wrapper for the doctests in pyproj """ import doctest import warnings import pytest import pyproj from test.conftest import proj_network_env def test_doctests(): """run the examples in the docstrings using the doctest module""" with warnings.catch_warnings(): warnings.filterwarnings( "ignore", "You will likely lose important projection information when", UserWarning, ) failure_count_proj, test_count = doctest.testmod(pyproj.proj, verbose=True) failure_count_crs, test_count_crs = doctest.testmod(pyproj.crs, verbose=True) failure_count_geod, test_count_geod = doctest.testmod(pyproj.geod, verbose=True) failure_count = failure_count_proj + failure_count_crs + failure_count_geod expected_failure_count = 0 try: import shapely.geometry # noqa: F401 pylint: disable=unused-import except (ImportError, OSError): # missing shapely expected_failure_count = 6 # if the below line fails, doctests have failed assert ( failure_count == expected_failure_count ), f"{failure_count} of the doctests failed" @pytest.mark.network def test_doctests__network(): """run the examples in the docstrings using the doctest module that require the network """ with proj_network_env(): pyproj.network.set_network_enabled(active=True) with pytest.warns(FutureWarning): failure_count, _ = doctest.testmod(pyproj.transformer, verbose=True) assert failure_count == 0, f"{failure_count} of the doctests failed" pyproj-3.7.1/test/test_exception_logging.py000066400000000000000000000005231475425760300211560ustar00rootroot00000000000000import pytest from pyproj import CRS, Proj from pyproj.exceptions import CRSError, ProjError def 
test_proj_exception(): with pytest.raises(ProjError, match="Internal Proj Error"): Proj("+proj=bobbyjoe") def test_crs_exception(): with pytest.raises(CRSError, match="Internal Proj Error"): CRS("+proj=bobbyjoe") pyproj-3.7.1/test/test_geod.py000066400000000000000000000653241475425760300164020ustar00rootroot00000000000000import math import pickle from contextlib import nullcontext from itertools import permutations import numpy import pytest from numpy.testing import assert_almost_equal, assert_array_equal from pyproj import Geod from pyproj.geod import GeodIntermediateFlag, reverse_azimuth try: from shapely.geometry import ( LinearRing, LineString, MultiLineString, MultiPoint, MultiPolygon, Point, Polygon, ) from shapely.geometry.polygon import orient SHAPELY_LOADED = True except (ImportError, OSError): SHAPELY_LOADED = False skip_shapely = pytest.mark.skipif(not SHAPELY_LOADED, reason="Missing shapely") # BOSTON _BOSTON_LAT = 42.0 + (15.0 / 60.0) _BOSTON_LON = -71.0 - (7.0 / 60.0) # PORTLAND _PORTLAND_LAT = 45.0 + (31.0 / 60.0) _PORTLAND_LON = -123.0 - (41.0 / 60.0) @pytest.mark.parametrize("return_back_azimuth", [True, False]) @pytest.mark.parametrize( "ellipsoid,true_az12,true_az21,expected_distance", [ ("clrk66", -66.5305947876623, 75.65363415556968, 4164192.708), ("WGS84", -66.5305947876623, 75.65363415556968, 4164074.239), ], ) def test_geodesic_inv( ellipsoid, true_az12, true_az21, expected_distance, return_back_azimuth, scalar_and_array, ): geod = Geod(ellps=ellipsoid) az12, az21, dist = geod.inv( scalar_and_array(_BOSTON_LON), scalar_and_array(_BOSTON_LAT), scalar_and_array(_PORTLAND_LON), scalar_and_array(_PORTLAND_LAT), return_back_azimuth=return_back_azimuth, ) if not return_back_azimuth: az21 = reverse_azimuth(az21) assert_almost_equal( (az12, az21, dist), ( scalar_and_array(true_az12), scalar_and_array(true_az21), scalar_and_array(expected_distance), ), decimal=3, ) @pytest.mark.parametrize( "lon_start,lat_start,lon_end,lat_end,res12,res21,resdist", [ (_BOSTON_LON, _BOSTON_LAT, _BOSTON_LON, _BOSTON_LAT, 180.0, 0.0, 0.0), ( _BOSTON_LON, _BOSTON_LAT, -80.79664651607472, 44.83744724383204, -66.53059478766238, 106.79071710136431, 832838.5416198927, ), ( -80.79664651607472, 44.83744724383204, -91.21816704002396, 46.536201500764776, -73.20928289863558, 99.32289055927389, 832838.5416198935, ), ( -91.21816704002396, 46.536201500764776, -102.10621593474447, 47.236494630072166, -80.67710944072617, 91.36325611787134, 832838.5416198947, ), ( -102.10621593474447, 47.236494630072166, -113.06616309750775, 46.88821539471925, -88.63674388212858, 83.32809401477382, 832838.5416198922, ), ( -113.06616309750775, 46.88821539471925, _PORTLAND_LON, _PORTLAND_LAT, -96.67190598522616, 75.65363415556973, 832838.5416198926, ), ], ) def test_geodesic_inv__multiple_points( lon_start, lat_start, lon_end, lat_end, res12, res21, resdist, scalar_and_array ): geod = Geod(ellps="clrk66") o_az12, o_az21, o_dist = geod.inv( scalar_and_array(lon_start), scalar_and_array(lat_start), scalar_and_array(lon_end), scalar_and_array(lat_end), ) assert_almost_equal( (o_az12, o_az21, o_dist), (scalar_and_array(res12), scalar_and_array(res21), scalar_and_array(resdist)), ) @pytest.mark.parametrize( "flag", [GeodIntermediateFlag.AZIS_DISCARD, GeodIntermediateFlag.AZIS_KEEP] ) def test_geodesic_inv_intermediate__azis_flag(flag): geod = Geod(ellps="clrk66") point_count = 7 res = geod.inv_intermediate( lon1=_BOSTON_LON, lat1=_BOSTON_LAT, lon2=_PORTLAND_LON, lat2=_PORTLAND_LAT, npts=point_count, initial_idx=0, terminus_idx=0, 
flags=flag, return_back_azimuth=False, ) assert res.npts == point_count assert_almost_equal(res.del_s, 694032.1180165777) assert_almost_equal(res.dist, 4164192.7080994663) assert_almost_equal( res.lons, [ _BOSTON_LON, -79.12667425528419, -87.67579185665936, -96.62666097960022, -105.7727853838606, -114.86985739838673, _PORTLAND_LON, ], ) assert_almost_equal( res.lats, [ _BOSTON_LAT, 44.46434542986116, 46.07643938331146, 47.0159762562249, 47.23724050253994, 46.72890509939583, _PORTLAND_LAT, ], ) if flag == GeodIntermediateFlag.AZIS_DISCARD: assert res.azis is None else: assert_almost_equal( res.azis, [ -66.5305947876623, -72.03560255472611, -78.11540731053181, -84.61959820187617, -91.32904268899453, -97.98697931255812, -104.34636584443031, ], ) @pytest.mark.parametrize( "flag", [GeodIntermediateFlag.AZIS_DISCARD, GeodIntermediateFlag.AZIS_KEEP] ) def test_geodesic_inv_intermediate__azis_flag__numpy(flag): geod = Geod(ellps="clrk66") point_count = 3 lons_b = numpy.empty(point_count) lats_b = numpy.empty(point_count) res = geod.inv_intermediate( out_lons=lons_b, out_lats=lats_b, lon1=_BOSTON_LON, lat1=_BOSTON_LAT, lon2=_PORTLAND_LON, lat2=_PORTLAND_LAT, npts=point_count, initial_idx=0, terminus_idx=0, flags=flag, return_back_azimuth=False, ) assert res.npts == point_count assert_almost_equal(res.del_s, 2082096.3540497331) assert_almost_equal(res.dist, 4164192.7080994663) assert_almost_equal(res.lons, [_BOSTON_LON, -96.62666098, _PORTLAND_LON]) assert_almost_equal( res.lats, [_BOSTON_LAT, 47.01597626, _PORTLAND_LAT], ) if flag == GeodIntermediateFlag.AZIS_DISCARD: assert res.azis is None else: assert_almost_equal( res.azis, [-66.5305947876623, -84.61959820187617, -104.34636584443031], ) assert res.lons is lons_b assert res.lats is lats_b @pytest.mark.parametrize("return_back_azimuth", [True, False]) def test_geodesic_inv_intermediate__numpy(return_back_azimuth): geod = Geod(ellps="clrk66") point_count = 5 lons = numpy.empty(point_count) lats = numpy.empty(point_count) azis = numpy.empty(point_count) with pytest.warns(UserWarning) if return_back_azimuth is None else nullcontext(): res = geod.inv_intermediate( out_lons=lons, out_lats=lats, out_azis=azis, lon1=_BOSTON_LON, lat1=_BOSTON_LAT, lon2=_PORTLAND_LON, lat2=_PORTLAND_LAT, npts=point_count, initial_idx=0, terminus_idx=0, return_back_azimuth=return_back_azimuth, ) assert res.npts == point_count assert_almost_equal(res.del_s, 1041048.1770248666) assert_almost_equal(res.dist, 4164192.7080994663) assert_almost_equal( res.lons, [_BOSTON_LON, -83.34061499, -96.62666098, -110.34292364, _PORTLAND_LON], ) assert_almost_equal( res.lats, [_BOSTON_LAT, 45.35049848, 47.01597626, 47.07350417, _PORTLAND_LAT] ) out_azis = res.azis[:-1] if return_back_azimuth in [True, None]: out_azis = reverse_azimuth(out_azis) assert_almost_equal( out_azis, [-66.53059479, -75.01125433, -84.6195982, -94.68069764] ) assert res.lons is lons assert res.lats is lats assert res.azis is azis @pytest.mark.parametrize( "del_s_fact,flag", [ (1, GeodIntermediateFlag.NPTS_ROUND), (4.5 / 5, GeodIntermediateFlag.NPTS_TRUNC), (5.5 / 5, GeodIntermediateFlag.NPTS_CEIL), ], ) def test_geodesic_inv_intermediate__del_s__numpy(del_s_fact, flag): geod = Geod(ellps="clrk66") point_count = 5 lons = numpy.empty(point_count) lats = numpy.empty(point_count) azis = numpy.empty(point_count) dist = 4164192.7080994663 del_s = dist / (point_count - 1) res = geod.inv_intermediate( out_lons=lons, out_lats=lats, out_azis=azis, lon1=_BOSTON_LON, lat1=_BOSTON_LAT, lon2=_PORTLAND_LON, lat2=_PORTLAND_LAT, 
del_s=del_s * del_s_fact, initial_idx=0, terminus_idx=0, flags=flag, return_back_azimuth=False, ) assert res.npts == point_count assert_almost_equal(res.del_s, del_s) assert_almost_equal(res.dist, dist) assert_almost_equal( res.lons, [_BOSTON_LON, -83.34061499, -96.62666098, -110.34292364, _PORTLAND_LON], ) assert_almost_equal( res.lats, [_BOSTON_LAT, 45.35049848, 47.01597626, 47.07350417, _PORTLAND_LAT] ) assert_almost_equal( res.azis[:-1], [-66.53059479, -75.01125433, -84.6195982, -94.68069764] ) assert res.lons is lons assert res.lats is lats assert res.azis is azis @pytest.mark.parametrize("return_back_azimuth", [True, False]) def test_geodesic_fwd_intermediate__numpy(return_back_azimuth): geod = Geod(ellps="clrk66") point_count = 5 lons = numpy.empty(point_count) lats = numpy.empty(point_count) azis = numpy.empty(point_count) true_az12 = -66.5305947876623 dist = 4164192.7080994663 del_s = dist / (point_count - 1) with pytest.warns(UserWarning) if return_back_azimuth is None else nullcontext(): res = geod.fwd_intermediate( out_lons=lons, out_lats=lats, out_azis=azis, lon1=_BOSTON_LON, lat1=_BOSTON_LAT, azi1=true_az12, npts=point_count, del_s=del_s, initial_idx=0, terminus_idx=0, return_back_azimuth=return_back_azimuth, ) assert res.npts == point_count assert res.lons is lons assert res.lats is lats assert res.azis is azis assert_almost_equal(res.del_s, del_s) assert_almost_equal(res.dist, dist) assert_almost_equal( res.lons, [-71.11666667, -83.34061499, -96.62666098, -110.34292364, -123.68333333], ) assert_almost_equal( res.lats, [42.25, 45.35049848, 47.01597626, 47.07350417, 45.51666667] ) if return_back_azimuth in [True, None]: azis = reverse_azimuth(azis) assert_almost_equal( azis, [-66.53059479, -75.01125433, -84.6195982, -94.68069764, -104.34636584] ) @pytest.mark.parametrize("return_back_azimuth", [True, False]) @pytest.mark.parametrize( "ellipsoid,true_az12,true_az21,expected_distance", [ ("clrk66", -66.5305947876623, 75.65363415556968, 4164192.708), ("WGS84", -66.5305947876623, 75.65363415556968, 4164074.239), ], ) def test_geodesic_fwd( ellipsoid, true_az12, true_az21, expected_distance, return_back_azimuth, scalar_and_array, ): geod = Geod(ellps=ellipsoid) endlon, endlat, backaz = geod.fwd( scalar_and_array(_BOSTON_LON), scalar_and_array(_BOSTON_LAT), scalar_and_array(true_az12), scalar_and_array(expected_distance), return_back_azimuth=return_back_azimuth, ) if not return_back_azimuth: backaz = reverse_azimuth(backaz) assert_almost_equal( (endlon, endlat, backaz), ( scalar_and_array(_PORTLAND_LON), scalar_and_array(_PORTLAND_LAT), scalar_and_array(true_az21), ), decimal=3, ) @pytest.mark.parametrize("include_initial", [True, False]) @pytest.mark.parametrize("include_terminus", [True, False]) def test_geodesic_npts(include_initial, include_terminus): geod = Geod(ellps="clrk66") initial_idx = int(not include_initial) terminus_idx = int(not include_terminus) lonlats = geod.npts( _BOSTON_LON, _BOSTON_LAT, _PORTLAND_LON, _PORTLAND_LAT, npts=6 - initial_idx - terminus_idx, initial_idx=initial_idx, terminus_idx=terminus_idx, ) expected_lonlats = [ (-80.797, 44.837), (-91.218, 46.536), (-102.106, 47.236), (-113.066, 46.888), ] if include_initial: expected_lonlats.insert(0, (_BOSTON_LON, _BOSTON_LAT)) if include_terminus: expected_lonlats.append((_PORTLAND_LON, _PORTLAND_LAT)) assert_almost_equal(lonlats, expected_lonlats, decimal=3) @pytest.mark.parametrize( "input_data", [ [1.0, 2.0, 3.0, math.nan], [1.0, 2.0, math.nan, 4.0], [1.0, math.nan, 3.0, 4.0], [math.nan, 2.0, 3.0, 4.0], 
[math.nan, math.nan, math.nan, math.nan], ], ) def test_geodesic_npts__nan(input_data): geod = Geod(ellps="WGS84") assert_array_equal(geod.npts(*input_data, npts=1), [(math.nan, math.nan)]) @pytest.mark.parametrize( "ellipsoid,expected_azi12,expected_az21,expected_dist", [("clrk66", -66.531, 75.654, 4164192.708), ("WGS84", -66.530, 75.654, 4164074.239)], ) def test_geodesic_inv__pickle( ellipsoid, expected_azi12, expected_az21, expected_dist, tmp_path, scalar_and_array ): geod = Geod(ellps=ellipsoid) az12, az21, dist = geod.inv( scalar_and_array(_BOSTON_LON), scalar_and_array(_BOSTON_LAT), scalar_and_array(_PORTLAND_LON), scalar_and_array(_PORTLAND_LAT), ) assert_almost_equal( (az12, az21, dist), ( scalar_and_array(expected_azi12), scalar_and_array(expected_az21), scalar_and_array(expected_dist), ), decimal=3, ) pickle_file = tmp_path / "geod1.pickle" with open(pickle_file, "wb") as gp1w: pickle.dump(geod, gp1w, -1) with open(pickle_file, "rb") as gp1: geod_pickle = pickle.load(gp1) pickle_az12, pickle_az21, pickle_dist = geod_pickle.inv( scalar_and_array(_BOSTON_LON), scalar_and_array(_BOSTON_LAT), scalar_and_array(_PORTLAND_LON), scalar_and_array(_PORTLAND_LAT), ) assert_almost_equal( (pickle_az12, pickle_az21, pickle_dist), ( scalar_and_array(expected_azi12), scalar_and_array(expected_az21), scalar_and_array(expected_dist), ), decimal=3, ) def test_geodesic_inv__string_init(scalar_and_array): geod = Geod("+ellps=clrk66") az12, az21, dist = geod.inv( scalar_and_array(_BOSTON_LON), scalar_and_array(_BOSTON_LAT), scalar_and_array(_PORTLAND_LON), scalar_and_array(_PORTLAND_LAT), ) assert_almost_equal( (az12, az21, dist), ( scalar_and_array(-66.531), scalar_and_array(75.654), scalar_and_array(4164192.708), ), decimal=3, ) def test_line_length__single_point(): geod = Geod(ellps="WGS84") assert geod.line_length(1, 1) == 0 def test_line_length__radians(): geod = Geod(ellps="WGS84") total_length = geod.line_length([1, 2], [0.5, 1], radians=True) assert_almost_equal(total_length, 5426061.32197463, decimal=3) def test_line_lengths__single_point(): geod = Geod(ellps="WGS84") assert geod.line_lengths(1, 1) == 0 def test_line_lengths__radians(): geod = Geod(ellps="WGS84") line_lengths = geod.line_lengths([1, 2], [0.5, 1], radians=True) assert_almost_equal(line_lengths, [5426061.32197463], decimal=3) def test_polygon_area_perimeter__single_point(): geod = Geod(ellps="WGS84") area, perimeter = geod.polygon_area_perimeter(1, 1) assert area == 0 assert perimeter == 0 @skip_shapely def test_geometry_length__point(): geod = Geod(ellps="WGS84") assert geod.geometry_length(Point(1, 2)) == 0 @skip_shapely def test_geometry_length__linestring(): geod = Geod(ellps="WGS84") assert_almost_equal( geod.geometry_length(LineString([Point(1, 2), Point(3, 4)])), 313588.39721259556, decimal=2, ) @skip_shapely def test_geometry_length__linestring__radians(): geod = Geod(ellps="WGS84") assert_almost_equal( geod.geometry_length( LineString( [ Point(math.radians(1), math.radians(2)), Point(math.radians(3), math.radians(4)), ] ), radians=True, ), 313588.39721259556, decimal=2, ) @skip_shapely def test_geometry_length__linearring(): geod = Geod(ellps="WGS84") assert_almost_equal( geod.geometry_length( LinearRing(LineString([Point(1, 2), Point(3, 4), Point(5, 2)])) ), 1072185.2103813463, decimal=2, ) @skip_shapely def test_geometry_length__polygon(): geod = Geod(ellps="WGS84") assert_almost_equal( geod.geometry_length( Polygon(LineString([Point(1, 2), Point(3, 4), Point(5, 2)])) ), 1072185.2103813463, decimal=2, ) 
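# Illustrative sketch (not part of the original test suite): outside of the assertions above,
# Geod.geometry_length is typically called directly on a shapely geometry to obtain its
# geodesic length in metres. A minimal standalone example, assuming shapely is installed,
# could look like this hypothetical helper (coordinates are lon/lat degrees); the import is
# kept local, mirroring this module's optional-shapely pattern.
def _example_track_length_m():
    from shapely.geometry import LineString

    geod = Geod(ellps="WGS84")
    track = LineString([(13.76, 45.65), (13.78, 45.66), (13.81, 45.67)])
    # geometry_length sums the geodesic distances between consecutive vertices.
    return geod.geometry_length(track)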
@skip_shapely def test_geometry_length__polygon__radians(): geod = Geod(ellps="WGS84") assert_almost_equal( geod.geometry_length( Polygon( LineString( [ Point(math.radians(1), math.radians(2)), Point(math.radians(3), math.radians(4)), Point(math.radians(5), math.radians(2)), ] ) ), radians=True, ), 1072185.2103813463, decimal=2, ) @skip_shapely def test_geometry_length__multipolygon(): geod = Geod(ellps="WGS84") polygon = Polygon(LineString([Point(1, 2), Point(3, 4), Point(5, 2)])) assert_almost_equal( geod.geometry_length(MultiPolygon([polygon, polygon])), 2 * 1072185.2103813463, decimal=2, ) @skip_shapely def test_geometry_length__multipolygon__radians(): geod = Geod(ellps="WGS84") polygon = Polygon( LineString( [ Point(math.radians(1), math.radians(2)), Point(math.radians(3), math.radians(4)), Point(math.radians(5), math.radians(2)), ] ) ) assert_almost_equal( geod.geometry_length(MultiPolygon([polygon, polygon]), radians=True), 2 * 1072185.2103813463, decimal=2, ) @skip_shapely def test_geometry_length__multilinestring(): geod = Geod(ellps="WGS84") line_string = LineString([Point(1, 2), Point(3, 4), Point(5, 2)]) assert_almost_equal( geod.geometry_length(MultiLineString([line_string, line_string])), 1254353.5888503822, decimal=2, ) @skip_shapely def test_geometry_length__multipoint(): geod = Geod(ellps="WGS84") assert ( geod.geometry_length(MultiPoint([Point(1, 2), Point(3, 4), Point(5, 2)])) == 0 ) @skip_shapely def test_geometry_area_perimeter__point(): geod = Geod(ellps="WGS84") assert geod.geometry_area_perimeter(Point(1, 2)) == (0, 0) @skip_shapely def test_geometry_area_perimeter__linestring(): geod = Geod(ellps="WGS84") assert_almost_equal( geod.geometry_area_perimeter(LineString([Point(1, 2), Point(3, 4)])), (0.0, 627176.7944251911), decimal=2, ) @skip_shapely def test_geometry_area_perimeter__linestring__radians(): geod = Geod(ellps="WGS84") assert_almost_equal( geod.geometry_area_perimeter( LineString( [ Point(math.radians(1), math.radians(2)), Point(math.radians(3), math.radians(4)), ] ), radians=True, ), (0.0, 627176.7944251911), decimal=2, ) @skip_shapely def test_geometry_area_perimeter__linearring(): geod = Geod(ellps="WGS84") assert_almost_equal( geod.geometry_area_perimeter( LinearRing(LineString([Point(1, 2), Point(3, 4), Point(5, 2)])) ), (-49187690467.58623, 1072185.2103813463), decimal=2, ) @skip_shapely def test_geometry_area_perimeter__polygon(): geod = Geod(ellps="WGS84") assert_almost_equal( geod.geometry_area_perimeter( Polygon(LineString([Point(1, 2), Point(3, 4), Point(5, 2)])) ), (-49187690467.58623, 1072185.2103813463), decimal=2, ) @skip_shapely def test_geometry_area_perimeter__polygon__radians(): geod = Geod(ellps="WGS84") assert_almost_equal( geod.geometry_area_perimeter( Polygon( LineString( [ Point(math.radians(1), math.radians(2)), Point(math.radians(3), math.radians(4)), Point(math.radians(5), math.radians(2)), ] ) ), radians=True, ), (-49187690467.58623, 1072185.2103813463), decimal=2, ) @skip_shapely def test_geometry_area_perimeter__polygon__holes(): geod = Geod(ellps="WGS84") polygon = Polygon( LineString([Point(1, 1), Point(1, 10), Point(10, 10), Point(10, 1)]), holes=[LineString([Point(1, 2), Point(3, 4), Point(5, 2)])], ) assert_almost_equal( geod.geometry_area_perimeter(orient(polygon, 1)), (944373881400.3394, 3979008.0359657984), decimal=2, ) assert_almost_equal( geod.geometry_area_perimeter(orient(polygon, -1)), (-944373881400.3394, 3979008.0359657984), decimal=2, ) @skip_shapely def test_geometry_area_perimeter__multipolygon(): geod = 
Geod(ellps="WGS84") polygon = Polygon(LineString([Point(1, 2), Point(3, 4), Point(5, 2)])) assert_almost_equal( geod.geometry_area_perimeter(MultiPolygon([polygon, polygon])), (-98375380935.17245, 2144370.4207626926), decimal=2, ) @skip_shapely def test_geometry_area_perimeter__multipolygon__radians(): geod = Geod(ellps="WGS84") polygon = Polygon( LineString( [ Point(math.radians(1), math.radians(2)), Point(math.radians(3), math.radians(4)), Point(math.radians(5), math.radians(2)), ] ) ) assert_almost_equal( geod.geometry_area_perimeter(MultiPolygon([polygon, polygon]), radians=True), (-98375380935.17245, 2144370.4207626926), decimal=2, ) @skip_shapely def test_geometry_area_perimeter__multilinestring(): geod = Geod(ellps="WGS84") line_string = LineString([Point(1, 2), Point(3, 4), Point(5, 2)]) assert_almost_equal( geod.geometry_area_perimeter(MultiLineString([line_string, line_string])), (-98375380935.17245, 2144370.4207626926), decimal=2, ) @skip_shapely def test_geometry_area_perimeter__multipoint(): geod = Geod(ellps="WGS84") assert geod.geometry_area_perimeter( MultiPoint([Point(1, 2), Point(3, 4), Point(5, 2)]) ) == (0, 0) @pytest.mark.parametrize( "lon,lat,az", permutations([10.0, [10.0], (10.0,)]) ) # 6 test cases def test_geod_fwd_honours_input_types(lon, lat, az): # 622 gg = Geod(ellps="clrk66") outx, outy, outz = gg.fwd(lons=lon, lats=lat, az=az, dist=0) assert isinstance(outx, type(lon)) assert isinstance(outy, type(lat)) assert isinstance(outz, type(az)) def test_geod_fwd_radians(): g = Geod(ellps="clrk66") lon1 = 1 lat1 = 1 az1 = 1 dist = 1 assert_almost_equal( numpy.rad2deg(g.fwd(lon1, lat1, az1, dist, radians=True)), g.fwd(lon1 * 180 / numpy.pi, lat1 * 180 / numpy.pi, az1 * 180 / numpy.pi, dist), ) def test_geod_inv_radians(): g = Geod(ellps="clrk66") lon1 = 0 lat1 = 0 lon2 = 1 lat2 = 1 # the third output is in distance, so we don't want to change from deg-rad there out_rad = list(g.inv(lon1, lat1, lon2, lat2, radians=True)) out_rad[0] *= 180 / numpy.pi out_rad[1] *= 180 / numpy.pi assert_almost_equal( out_rad, g.inv( lon1 * 180 / numpy.pi, lat1 * 180 / numpy.pi, lon2 * 180 / numpy.pi, lat2 * 180 / numpy.pi, ), ) @pytest.mark.parametrize("func_name", ("fwd", "inv")) @pytest.mark.parametrize("radians", (True, False)) def test_geod_scalar_array(func_name, radians): # verify two singlepoint calculations match an array of length two g = Geod(ellps="clrk66") func = getattr(g, func_name) assert_almost_equal( numpy.transpose([func(0, 0, 1, 1, radians=radians) for i in range(2)]), func([0, 0], [0, 0], [1, 1], [1, 1], radians=radians), ) @pytest.mark.parametrize( "lons1,lats1,lons2", permutations([10.0, [10.0], (10.0,), numpy.array([10.0])], 3) ) # 6 test cases def test_geod_inv_honours_input_types(lons1, lats1, lons2): # 622 gg = Geod(ellps="clrk66") outx, outy, outz = gg.inv(lons1=lons1, lats1=lats1, lons2=lons2, lats2=0) assert isinstance(outx, type(lons1)) assert isinstance(outy, type(lats1)) assert isinstance(outz, type(lons2)) def test_geodesic_fwd_inv_inplace(): gg = Geod(ellps="clrk66") _BOSTON_LON = numpy.array([0], dtype=numpy.float64) _BOSTON_LAT = numpy.array([0], dtype=numpy.float64) _PORTLAND_LON = numpy.array([1], dtype=numpy.float64) _PORTLAND_LAT = numpy.array([1], dtype=numpy.float64) az12, az21, dist = gg.inv( _BOSTON_LON, _BOSTON_LAT, _PORTLAND_LON, _PORTLAND_LAT, inplace=True ) assert az12 is _BOSTON_LON assert az21 is _BOSTON_LAT assert dist is _PORTLAND_LON endlon, endlat, backaz = gg.fwd(_BOSTON_LON, _BOSTON_LAT, az12, dist, inplace=True) assert endlon is 
_BOSTON_LON assert endlat is _BOSTON_LAT assert backaz is az12 @pytest.mark.parametrize("kwarg", ["b", "f", "es", "rf", "e"]) def test_geod__build_kwargs(kwarg): gg = Geod(ellps="clrk66") if kwarg == "rf": value = 1.0 / gg.f elif kwarg == "e": value = math.sqrt(gg.es) else: value = getattr(gg, kwarg) gg2 = Geod(a=gg.a, **{kwarg: value}) assert_almost_equal(gg.a, gg2.a) assert_almost_equal(gg.b, gg2.b) assert_almost_equal(gg.f, gg2.f) assert_almost_equal(gg.es, gg2.es) @pytest.mark.parametrize("radians", [False, True]) def test_geod__reverse_azimuth(radians): f = math.pi / 180 if radians else 1 xy = numpy.array( [ [0, 0 + 180], [180, 180 - 180], [-180, -180 + 180], [10, 10 - 180], [20, 20 - 180], [-10, -10 + 180], ] ) for x, y in xy: assert_almost_equal(reverse_azimuth(x * f, radians=radians), y * f) xx = xy.T[0] yy = xy.T[1] assert_almost_equal(reverse_azimuth(xx * f, radians=radians), yy * f) pyproj-3.7.1/test/test_list.py000066400000000000000000000016111475425760300164240ustar00rootroot00000000000000from numpy.testing import assert_almost_equal from pyproj.list import get_ellps_map, get_prime_meridians_map, get_proj_operations_map def test_backwards_compatible_import_paths(): from pyproj import ( # noqa: F401 pylint: disable=unused-import get_ellps_map, get_prime_meridians_map, get_proj_operations_map, ) def test_get_ellps_map(): ellps_map = get_ellps_map() assert ellps_map["WGS84"]["description"] == "WGS 84" assert_almost_equal(ellps_map["WGS84"]["a"], 6378137.0, decimal=1) assert_almost_equal(ellps_map["WGS84"]["rf"], 298.257223563, decimal=1) def test_get_prime_meridians_map(): prime_meridians_map = get_prime_meridians_map() assert prime_meridians_map["greenwich"] == "0dE" def test_get_proj_operations_map(): proj_operations_map = get_proj_operations_map() assert proj_operations_map["aea"] == "Albers Equal Area" pyproj-3.7.1/test/test_network.py000066400000000000000000000035121475425760300171440ustar00rootroot00000000000000from unittest.mock import patch import certifi import pytest from pyproj.network import set_ca_bundle_path @patch.dict("os.environ", {}, clear=True) @patch("pyproj.network._set_context_ca_bundle_path") def test_ca_bundle_path__default(c_set_ca_bundle_path_mock): set_ca_bundle_path() c_set_ca_bundle_path_mock.assert_called_with(certifi.where()) @pytest.mark.parametrize( "env_var", ["PROJ_CURL_CA_BUNDLE", "CURL_CA_BUNDLE", "SSL_CERT_FILE"] ) @patch("pyproj.network._set_context_ca_bundle_path") def test_ca_bundle_path__always_certifi(c_set_ca_bundle_path_mock, env_var): with patch.dict("os.environ", {env_var: "/tmp/dummy/path/cacert.pem"}, clear=True): set_ca_bundle_path(True) c_set_ca_bundle_path_mock.assert_called_with(certifi.where()) @patch.dict("os.environ", {}, clear=True) @patch("pyproj.network._set_context_ca_bundle_path") def test_ca_bundle_path__skip(c_set_ca_bundle_path_mock): set_ca_bundle_path(False) c_set_ca_bundle_path_mock.assert_called_with("") @pytest.mark.parametrize( "env_var", ["PROJ_CURL_CA_BUNDLE", "CURL_CA_BUNDLE", "SSL_CERT_FILE"] ) @patch("pyproj.network._set_context_ca_bundle_path") def test_ca_bundle_path__env_var_skip(c_set_ca_bundle_path_mock, env_var): with patch.dict("os.environ", {env_var: "/tmp/dummy/path/cacert.pem"}, clear=True): set_ca_bundle_path() c_set_ca_bundle_path_mock.assert_called_with("") @pytest.mark.parametrize( "env_var", ["PROJ_CURL_CA_BUNDLE", "CURL_CA_BUNDLE", "SSL_CERT_FILE"] ) @patch("pyproj.network._set_context_ca_bundle_path") def test_ca_bundle_path__custom_path(c_set_ca_bundle_path_mock, env_var): with 
patch.dict("os.environ", {env_var: "/tmp/dummy/path/cacert.pem"}, clear=True): set_ca_bundle_path("/my/path/to/cacert.pem") c_set_ca_bundle_path_mock.assert_called_with("/my/path/to/cacert.pem") pyproj-3.7.1/test/test_pickle.py000066400000000000000000000044361475425760300167300ustar00rootroot00000000000000"""run test.py first!""" import os import pickle import shutil import tempfile from contextlib import contextmanager import numpy from numpy.testing import assert_allclose from pyproj import Proj try: from time import perf_counter except ImportError: from time import clock as perf_counter @contextmanager def temporary_directory(): """ Get a temporary directory """ temp_dir = tempfile.mkdtemp() try: yield temp_dir finally: shutil.rmtree(temp_dir) def test_pickle(): nx = 349 ny = 277 dx = 32463.41 dy = dx print("do it again, from pickled instance ...") # find 4 lon/lat crnrs of AWIPS grid 221. llcrnrx = 0.0 llcrnry = 0.0 urcrnrx = dx * (nx - 1) urcrnry = dy * (ny - 1) dx = (urcrnrx - llcrnrx) / (nx - 1) dy = (urcrnry - llcrnry) / (ny - 1) x = llcrnrx + dx * numpy.indices((ny, nx), "f")[1, :, :] y = llcrnry + dy * numpy.indices((ny, nx), "f")[0, :, :] awips221_pre_pickle = Proj(proj="lcc", R=6371200, lat_1=50, lat_2=50, lon_0=-107) with temporary_directory() as tmpdir: with open(os.path.join(tmpdir, "test.pickle"), "wb") as pfh: pickle.dump(awips221_pre_pickle, pfh, -1) with open(os.path.join(tmpdir, "test.pickle"), "rb") as prh: awips221 = pickle.load(prh) t1 = perf_counter() lons, lats = awips221(x, y, inverse=True) t2 = perf_counter() print(f"compute lats/lons for all points on AWIPS 221 grid ({nx}x{ny})") print("max/min lons in radians") print( numpy.minimum.reduce(numpy.ravel(lons)), numpy.maximum.reduce(numpy.ravel(lons)) ) print("max/min lats in radians") print( numpy.minimum.reduce(numpy.ravel(lats)), numpy.maximum.reduce(numpy.ravel(lats)) ) print("took", t2 - t1, "secs") # reverse transformation. 
t1 = perf_counter() xx, yy = awips221(lons, lats) t2 = perf_counter() print("max abs error for x") max_abs_err_x = numpy.maximum.reduce(numpy.fabs(numpy.ravel(x - xx))) print(max_abs_err_x) assert_allclose(max_abs_err_x, 0, atol=1e-4) print("max abs error for y") max_abs_err_y = numpy.maximum.reduce(numpy.fabs(numpy.ravel(y - yy))) print(max_abs_err_y) assert_allclose(max_abs_err_y, 0, atol=1e-4) print("took", t2 - t1, "secs") pyproj-3.7.1/test/test_proj.py000066400000000000000000000514541475425760300164350ustar00rootroot00000000000000import concurrent.futures import math import os import unittest from unittest.mock import patch import numpy import pytest from numpy.testing import assert_almost_equal import pyproj from pyproj import Geod, Proj, pj_ellps, pj_list, transform from pyproj.exceptions import CRSError, ProjError from pyproj.geod import reverse_azimuth from test.conftest import proj_network_env class BasicTest(unittest.TestCase): def testInitWithBackupString4(self): # this fails unless backup of to_string(4) is used pj = Proj( "+proj=merc +a=6378137.0 +b=6378137.0 +nadgrids=@null " "+lon_0=0.0 +x_0=0.0 +y_0=0.0 +units=m +no_defs" ) assert pj.crs.name == "unknown" def testProjAwips221(self): # AWIPS is Advanced Weather Interactive Processing System params = {"proj": "lcc", "R": 6371200, "lat_1": 50, "lat_2": 50, "lon_0": -107} awips221 = Proj( proj=params["proj"], R=params["R"], lat_1=params["lat_1"], lat_2=params["lat_2"], lon_0=params["lon_0"], preserve_units=False, ) awips221_from_dict = Proj(params, preserve_units=False) items = sorted(val for val in awips221.crs.srs.split() if val) items_dict = sorted(val for val in awips221_from_dict.crs.srs.split() if val) self.assertEqual(items, items_dict) expected = sorted( [ "+proj=lcc", "+R=6371200", "+lat_1=50", "+lat_2=50", "+lon_0=-107", "+type=crs", ] ) self.assertEqual(items, expected) point = awips221(-145.5, 1.0) x, y = -5632642.22547495, 1636571.4883145525 self.assertAlmostEqual(point[0], x) self.assertAlmostEqual(point[1], y) pairs = [ [(-45, 45), (4351601.20766915, 7606948.029327129)], [(45, 45), (5285389.07739382, 14223336.17467613)], [(45, -45), (20394982.466924712, 21736546.456803113)], [(-45, -45), (16791730.756976362, -3794425.4816524936)], ] for point_geog, expected in pairs: point = awips221(*point_geog) self.assertAlmostEqual(point[0], expected[0]) self.assertAlmostEqual(point[1], expected[1]) point_geog2 = awips221(*point, inverse=True) self.assertAlmostEqual(point_geog[0], point_geog2[0]) self.assertAlmostEqual(point_geog[1], point_geog2[1]) def test_from_dict_with_bool(self): # issue #183 p_d = { "proj": "omerc", "lat_2": 80.27942, "lat_0": 62.87671, "lat_1": 42.751232, "ellps": "WGS84", "no_rot": True, "lon_1": 33.793186, "lon_2": -18.374414, } p = Proj(p_d) self.assertTrue("+no_rot" in p.srs.split()) p_d = { "proj": "omerc", "lat_2": 80.27942, "lat_0": 62.87671, "lat_1": 42.751232, "ellps": "WGS84", "no_rot": False, "lon_1": 33.793186, "lon_2": -18.374414, } p = Proj(p_d) self.assertFalse("+no_rot" in p.srs.split()) class InverseHammerTest(unittest.TestCase): # This is a unit test of the inverse of the hammer projection, which # was added in the 4.9.3 version of PROJ (then PROJ.4). 
@classmethod def setUpClass(self): self.p = Proj(proj="hammer") # hammer proj self.x, self.y = self.p(-30, 40) def test_forward(self): self.assertAlmostEqual(self.x, -2711575.083, places=3) self.assertAlmostEqual(self.y, 4395506.619, places=3) def test_inverse(self): lon, lat = self.p(self.x, self.y, inverse=True) self.assertAlmostEqual(lon, -30.0, places=3) self.assertAlmostEqual(lat, 40.0, places=3) class TypeError_Transform_Issue8_Test(unittest.TestCase): # Test for "Segmentation fault on pyproj.transform #8" # https://github.com/jswhit/pyproj/issues/8 def setUp(self): with pytest.warns(FutureWarning): self.p = Proj(init="epsg:4269") def test_tranform_none_1st_parmeter(self): # test should raise Type error if projections are not of Proj classes # version 1.9.4 produced AttributeError, now should raise TypeError with pytest.warns(FutureWarning), pytest.raises(CRSError): transform(None, self.p, -74, 39) def test_tranform_none_2nd_parmeter(self): # test should raise Type error if projections are not of Proj classes # version 1.9.4 has a Segmentation Fault, now should raise TypeError with pytest.warns(FutureWarning), pytest.raises(CRSError): transform(self.p, None, -74, 39) class Geod_NoDefs_Issue22_Test(unittest.TestCase): # Test for Issue #22, Geod with "+no_defs" in initstring # Before PR #23 merged 2015-10-07, having +no_defs in the # initstring would result in a ValueError def test_geod_nodefs(self): Geod("+a=6378137 +b=6378137 +no_defs") class ProjLatLongTypeErrorTest(unittest.TestCase): # .latlong() using in transform raised a TypeError in release 1.9.5.1 # reported in issue #53, resolved in #73. def test_latlong_typeerror(self): p = Proj("+proj=stere +lon_0=-39 +lat_0=90 +lat_ts=71.0 +ellps=WGS84") self.assertTrue(isinstance(p, Proj)) # if not patched this line raises a "TypeError: p2 must be a Proj class" with pytest.warns(FutureWarning): lon, lat = transform(p, p.to_latlong(), 200000, 400000) class ForwardInverseTest(unittest.TestCase): def test_fwd_inv(self): for pj in pj_list.keys(): with self.subTest(pj=pj): try: p = Proj(proj=pj) x, y = p(-30, 40) # note, for proj 4.9.2 or before the inverse projection # may be missing and pyproj 1.9.5.1 or before does not # test for this and will # give a segmentation fault at this point: lon, lat = p(x, y, inverse=True) except RuntimeError: pass # Tests for shared memory between Geod objects class GeodSharedMemoryBugTestIssue64(unittest.TestCase): def setUp(self): self.g = Geod(ellps="clrk66") self.ga = self.g.a self.mercury = Geod(a=2439700) # Mercury 2000 ellipsoid # Mercury is much smaller than earth. 
def test_not_shared_memory(self): self.assertEqual(self.ga, self.g.a) # mecury must have a different major axis from earth self.assertNotEqual(self.g.a, self.mercury.a) self.assertNotEqual(self.g.b, self.mercury.b) self.assertNotEqual(self.g.sphere, self.mercury.sphere) self.assertNotEqual(self.g.f, self.mercury.f) self.assertNotEqual(self.g.es, self.mercury.es) # initstrings were not shared in issue #64 self.assertNotEqual(self.g.initstring, self.mercury.initstring) def test_distances(self): # note calculated distance was not an issue with #64, # but it still a shared memory test boston_lat = 42.0 + (15.0 / 60.0) boston_lon = -71.0 - (7.0 / 60.0) portland_lat = 45.0 + (31.0 / 60.0) portland_lon = -123.0 - (41.0 / 60.0) az12, az21, dist_g = self.g.inv( boston_lon, boston_lat, portland_lon, portland_lat ) az12, az21, dist_mercury = self.mercury.inv( boston_lon, boston_lat, portland_lon, portland_lat ) self.assertLess(dist_mercury, dist_g) class ReprTests(unittest.TestCase): # test __repr__ for Proj object def test_repr(self): p = Proj(proj="latlong", preserve_units=True) expected = ( "\n" "Description: PROJ-based coordinate operation\n" "Area of Use:\n" "- undefined" ) self.assertEqual(repr(p), expected) # test __repr__ for Geod object def test_sphere(self): # ellipse is Venus 2000 (IAU2000:29900), which is a sphere g = Geod("+a=6051800 +b=6051800") self.assertEqual(repr(g), "Geod('+a=6051800.0 +f=0.0')") # test __repr__ for Geod object def test_ellps_name_round_trip(self): # this could be done in a parameter fashion for ellps_name in pj_ellps: # skip tests, these ellipses NWL9D and WGS66 are the same if ellps_name in ("NWL9D", "WGS66"): continue p = Geod(ellps=ellps_name) expected = f"Geod(ellps='{ellps_name}')" self.assertEqual(repr(p), expected) class TestRadians(unittest.TestCase): """Tests issue #84""" def setUp(self): self.g = Geod(ellps="clrk66") self.boston_d = (-71.0 - (7.0 / 60.0), 42.0 + (15.0 / 60.0)) self.boston_r = (math.radians(self.boston_d[0]), math.radians(self.boston_d[1])) self.portland_d = (-123.0 - (41.0 / 60.0), 45.0 + (31.0 / 60.0)) self.portland_r = ( math.radians(self.portland_d[0]), math.radians(self.portland_d[1]), ) def test_inv_radians(self): # Get bearings and distance from Boston to Portland in degrees az12_d, az21_d, dist_d = self.g.inv( self.boston_d[0], self.boston_d[1], self.portland_d[0], self.portland_d[1], radians=False, ) # Get bearings and distance from Boston to Portland in radians az12_r, az21_r, dist_r = self.g.inv( self.boston_r[0], self.boston_r[1], self.portland_r[0], self.portland_r[1], radians=True, ) # Check they are equal self.assertAlmostEqual(az12_d, math.degrees(az12_r)) self.assertAlmostEqual(az21_d, math.degrees(az21_r)) self.assertAlmostEqual(dist_d, dist_r) def test_fwd_radians(self): # Get bearing and distance to Portland az12_d, az21_d, dist = self.g.inv( self.boston_d[0], self.boston_d[1], self.portland_d[0], self.portland_d[1], radians=False, ) # Calculate Portland's lon/lat from bearing and distance in degrees endlon_d, endlat_d, backaz_d = self.g.fwd( self.boston_d[0], self.boston_d[1], az12_d, dist, radians=False ) # Calculate Portland's lon/lat from bearing and distance in radians for return_back_azimuth in [False, True]: endlon_r, endlat_r, backaz_r = self.g.fwd( self.boston_r[0], self.boston_r[1], math.radians(az12_d), dist, radians=True, return_back_azimuth=return_back_azimuth, ) if not return_back_azimuth: backaz_r = reverse_azimuth(backaz_r, radians=True) # Check they are equal self.assertAlmostEqual(endlon_d, 
math.degrees(endlon_r)) self.assertAlmostEqual(endlat_d, math.degrees(endlat_r)) self.assertAlmostEqual(backaz_d, math.degrees(backaz_r)) # Check to make sure we're back in Portland self.assertAlmostEqual(endlon_d, self.portland_d[0]) self.assertAlmostEqual(endlat_d, self.portland_d[1]) def test_npts_radians(self): # Calculate 10 points between Boston and Portland in degrees points_d = self.g.npts( lon1=self.boston_d[0], lat1=self.boston_d[1], lon2=self.portland_d[0], lat2=self.portland_d[1], npts=10, radians=False, ) # Calculate 10 points between Boston and Portland in radians points_r = self.g.npts( lon1=self.boston_r[0], lat1=self.boston_r[1], lon2=self.portland_r[0], lat2=self.portland_r[1], npts=10, radians=True, ) # Check they are equal for index, dpoint in enumerate(points_d): self.assertAlmostEqual(dpoint[0], math.degrees(points_r[index][0])) self.assertAlmostEqual(dpoint[1], math.degrees(points_r[index][1])) class Geod_NaN_Issue112_Test(unittest.TestCase): # Test for Issue #112; Geod should silently propagate NaNs in input # to the output. def test_geod_nans(self): g = Geod(ellps="clrk66") (azi1, azi2, s12) = g.inv(43, 10, float("nan"), 20) self.assertTrue(azi1 != azi1) self.assertTrue(azi2 != azi2) self.assertTrue(s12 != s12) (azi1, azi2, s12) = g.inv(43, 10, 53, float("nan")) self.assertTrue(azi1 != azi1) self.assertTrue(azi2 != azi2) self.assertTrue(s12 != s12) # Illegal latitude is treated as NaN (azi1, azi2, s12) = g.inv(43, 10, 53, 91) self.assertTrue(azi1 != azi1) self.assertTrue(azi2 != azi2) self.assertTrue(s12 != s12) (lon2, lat2, azi2) = g.fwd(43, 10, float("nan"), 1e6) self.assertTrue(lon2 != lon2) self.assertTrue(lat2 != lat2) self.assertTrue(azi2 != azi2) (lon2, lat2, azi2) = g.fwd(43, 10, 20, float("nan")) self.assertTrue(lon2 != lon2) self.assertTrue(lat2 != lat2) self.assertTrue(azi2 != azi2) (lon2, lat2, azi2) = g.fwd(43, float("nan"), 20, 1e6) self.assertTrue(lon2 != lon2) self.assertTrue(lat2 != lat2) self.assertTrue(azi2 != azi2) # Illegal latitude is treated as NaN (lon2, lat2, azi2) = g.fwd(43, 91, 20, 1e6) self.assertTrue(lon2 != lon2) self.assertTrue(lat2 != lat2) self.assertTrue(azi2 != azi2) # Only lon2 is NaN (lon2, lat2, azi2) = g.fwd(float("nan"), 10, 20, 1e6) self.assertTrue(lon2 != lon2) self.assertTrue(lat2 == lat2) self.assertTrue(azi2 == azi2) def test_proj_equals(): assert Proj(4326) == Proj("epsg:4326") assert Proj(4326) != Proj("epsg:3857") with pytest.warns(UserWarning): assert Proj(4326) == Proj(Proj("epsg:4326").crs.to_proj4()) def test_initialize_proj_crs_no_proj4(): proj = Proj( { "a": 6371229.0, "b": 6371229.0, "lon_0": -10.0, "o_lat_p": 30.0, "o_lon_p": 0.0, "o_proj": "longlat", "proj": "ob_tran", } ) assert proj.srs.startswith("+proj=ob_tran") def test_initialize_proj_crs_no_plus(): proj = Proj("proj=lonlat") assert proj.crs.srs == "proj=lonlat type=crs" def test_initialize_projparams_with_kwargs(): proj_mixed_args = Proj("+proj=utm +zone=10", ellps="WGS84") proj_positional = Proj("+proj=utm +zone=10 +ellps=WGS84") assert proj_mixed_args.is_exact_same(proj_positional) def test_equals_different_type(): assert Proj("epsg:4326") != "" def test_is_exact_same_different_type(): assert not Proj("epsg:4326").is_exact_same(None) def test_reset_errno(): proj = Proj( {"proj": "laea", "lat_0": -90, "lon_0": 0, "a": 6371228.0, "units": "m"} ) assert not proj.crs.is_geographic assert proj(0, 0, inverse=True, errcheck=True) == (0.0, -90.0) @pytest.mark.parametrize("radians", [False, True]) def test_get_factors__2d_input(radians): transformer = 
Proj(3857) longitude = numpy.array([[0, 1], [2, 3]]) latitude = numpy.array([[1, 2], [3, 4]]) if radians: longitude = numpy.radians(longitude) latitude = numpy.radians(latitude) factors = transformer.get_factors( longitude=longitude, latitude=latitude, radians=radians ) assert_almost_equal( factors.meridional_scale, [[1.0001523, 1.0006095], [1.0013723, 1.0024419]] ) assert_almost_equal( factors.parallel_scale, [[1.0001523, 1.0006095], [1.0013723, 1.0024419]] ) assert_almost_equal( factors.areal_scale, [[1.00030468, 1.00121946], [1.00274658, 1.00488976]] ) assert_almost_equal(factors.angular_distortion, [[0, 0], [0, 0]], decimal=5) assert_almost_equal( factors.meridian_parallel_angle, [[89.99, 90], [90, 90]], decimal=2 ) assert_almost_equal(factors.meridian_convergence, [[0, 0], [0, 0]]) assert_almost_equal( factors.tissot_semimajor, [[1.00015234, 1.00060955], [1.00137235, 1.0024419]] ) assert_almost_equal( factors.tissot_semiminor, [[1.00015232, 1.00060953], [1.00137235, 1.0024419]] ) assert_almost_equal(factors.dx_dlam, [[1, 1], [1, 1]]) assert_almost_equal(factors.dx_dphi, [[0, 0], [0, 0]]) assert_almost_equal(factors.dy_dlam, [[0, 0], [0, 0]]) assert_almost_equal( factors.dy_dphi, [[1.00015233, 1.00060954], [1.00137235, 1.0024419]] ) def test_get_factors(): transformer = Proj(3717) factors = transformer.get_factors(-120, 34) assert_almost_equal(factors.meridional_scale, 1.0005466) assert_almost_equal(factors.parallel_scale, 1.0005466) assert_almost_equal(factors.areal_scale, 1.00109349) assert_almost_equal(factors.angular_distortion, 0) assert_almost_equal(factors.meridian_parallel_angle, 90) assert_almost_equal(factors.meridian_convergence, 1.67864770) assert_almost_equal(factors.tissot_semimajor, 1.00055, decimal=5) assert_almost_equal(factors.tissot_semiminor, 1.00055, decimal=5) assert_almost_equal(factors.dx_dlam, 0.8300039) assert_almost_equal(factors.dx_dphi, -0.0292052) assert_almost_equal(factors.dy_dlam, 0.0243244) assert_almost_equal(factors.dy_dphi, 0.9965495) def test_get_factors__nan_inf(): transformer = Proj(3857) factors = transformer.get_factors( longitude=[0, numpy.nan, numpy.inf, 0], latitude=[numpy.nan, 2, 2, numpy.inf] ) assert_almost_equal( factors.meridional_scale, [numpy.inf, numpy.inf, numpy.inf, numpy.inf] ) assert_almost_equal( factors.parallel_scale, [numpy.inf, numpy.inf, numpy.inf, numpy.inf] ) assert_almost_equal( factors.areal_scale, [numpy.inf, numpy.inf, numpy.inf, numpy.inf] ) assert_almost_equal( factors.angular_distortion, [numpy.inf, numpy.inf, numpy.inf, numpy.inf] ) assert_almost_equal( factors.meridian_parallel_angle, [numpy.inf, numpy.inf, numpy.inf, numpy.inf] ) assert_almost_equal( factors.meridian_convergence, [numpy.inf, numpy.inf, numpy.inf, numpy.inf] ) assert_almost_equal( factors.tissot_semimajor, [numpy.inf, numpy.inf, numpy.inf, numpy.inf] ) assert_almost_equal( factors.tissot_semiminor, [numpy.inf, numpy.inf, numpy.inf, numpy.inf] ) assert_almost_equal(factors.dx_dlam, [numpy.inf, numpy.inf, numpy.inf, numpy.inf]) assert_almost_equal(factors.dx_dphi, [numpy.inf, numpy.inf, numpy.inf, numpy.inf]) assert_almost_equal(factors.dy_dlam, [numpy.inf, numpy.inf, numpy.inf, numpy.inf]) assert_almost_equal(factors.dy_dphi, [numpy.inf, numpy.inf, numpy.inf, numpy.inf]) def test_get_factors__errcheck(): transformer = Proj(3857) with pytest.raises(ProjError): transformer.get_factors(longitude=40, latitude=70, errcheck=True, radians=True) def test_numpy_bool_kwarg_false(): # Issue 564 south = numpy.array(50) < 0 proj = Proj( proj="utm", zone=32, 
ellipsis="WGS84", datum="WGS84", units="m", south=south ) assert "south" not in proj.srs def test_numpy_bool_kwarg_true(): # Issue 564 south = numpy.array(50) > 0 proj = Proj( proj="utm", zone=32, ellipsis="WGS84", datum="WGS84", units="m", south=south ) assert "+south " in proj.srs @patch.dict("os.environ", {"PROJ_NETWORK": "ON"}, clear=True) def test_network__disable(): with proj_network_env(): pyproj.network.set_network_enabled(active=False) transformer = Proj(3857) assert transformer.is_network_enabled is False @patch.dict("os.environ", {"PROJ_NETWORK": "OFF"}, clear=True) def test_network__enable(): with proj_network_env(): pyproj.network.set_network_enabled(active=True) transformer = Proj(3857) assert transformer.is_network_enabled is True def test_network__default(): with proj_network_env(): pyproj.network.set_network_enabled() transformer = Proj(3857) assert transformer.is_network_enabled == ( os.environ.get("PROJ_NETWORK") == "ON" ) def test_radians(): proj = Proj( {"proj": "lcc", "R": 6371200, "lat_1": 50, "lat_2": 50, "lon_0": -107}, preserve_units=False, ) assert_almost_equal( proj(math.radians(-145.5), math.radians(1.0), radians=True), (-5632642.22547495, 1636571.4883145525), ) def test_proj_multithread(): # https://github.com/pyproj4/pyproj/issues/782 trans = Proj("EPSG:3857") def transform(num): return trans(1, 2) with concurrent.futures.ThreadPoolExecutor(max_workers=2) as executor: for result in executor.map(transform, range(10)): pass pyproj-3.7.1/test/test_show_versions.py000066400000000000000000000023411475425760300203620ustar00rootroot00000000000000from pyproj._show_versions import ( _get_deps_info, _get_proj_info, _get_sys_info, show_versions, ) def test_get_proj_info(): pyproj_info = _get_proj_info() assert "pyproj" in pyproj_info assert "PROJ (runtime)" in pyproj_info assert "PROJ (compiled)" in pyproj_info assert "data dir" in pyproj_info assert "user_data_dir" in pyproj_info assert "PROJ DATA (recommended version)" in pyproj_info assert "PROJ Database" in pyproj_info assert "EPSG Database" in pyproj_info assert "ESRI Database" in pyproj_info assert "IGNF Database" in pyproj_info def test_get_sys_info(): sys_info = _get_sys_info() assert "python" in sys_info assert "executable" in sys_info assert "machine" in sys_info def test_get_deps_info(): deps_info = _get_deps_info() assert "pip" in deps_info assert "setuptools" in deps_info assert "certifi" in deps_info assert "Cython" in deps_info def test_show_versions_with_proj(capsys): show_versions() out, err = capsys.readouterr() assert "System" in out assert "python" in out assert "PROJ (runtime)" in out assert "PROJ (compiled)" in out assert "data dir" in out assert "Python deps" in out pyproj-3.7.1/test/test_sync.py000066400000000000000000000143661475425760300164400ustar00rootroot00000000000000from datetime import datetime, timedelta from unittest.mock import MagicMock, patch from urllib.error import URLError import pytest from pyproj.aoi import BBox from pyproj.sync import ( _download_resource_file, _load_grid_geojson, _sha256sum, get_transform_grid_list, ) @pytest.mark.network def test_get_transform_grid_list(): grids = get_transform_grid_list(include_already_downloaded=True) assert len(grids) > 200 @pytest.mark.network def test_get_transform_grid_list__bbox__antimeridian(): grids = get_transform_grid_list( bbox=BBox(170, -90, -170, 90), include_already_downloaded=True ) assert len(grids) > 10 source_ids = set() for grid in grids: source_ids.add(grid["properties"]["source_id"]) assert sorted(source_ids) == [ 
"au_ga", "nc_dittt", "nz_linz", "us_nga", "us_noaa", ] @pytest.mark.network def test_get_transform_grid_list__bbox__out_of_bounds(): grids = get_transform_grid_list( bbox=BBox(170, -90, 190, 90), include_already_downloaded=True ) assert len(grids) > 10 source_ids = set() for grid in grids: source_ids.add(grid["properties"]["source_id"]) assert sorted(source_ids) == [ "au_ga", "nc_dittt", "nz_linz", "us_nga", "us_noaa", ] @pytest.mark.network def test_get_transform_grid_list__source_id(): grids = get_transform_grid_list( bbox=BBox(170, -90, -170, 90), source_id="us_noaa", include_already_downloaded=True, ) assert len(grids) > 5 source_ids = set() for grid in grids: source_ids.add(grid["properties"]["source_id"]) assert sorted(source_ids) == ["us_noaa"] @pytest.mark.network def test_get_transform_grid_list__contains(): grids = get_transform_grid_list( bbox=BBox(170, -90, -170, 90), spatial_test="contains", include_already_downloaded=True, ) assert len(grids) > 5 source_ids = set() for grid in grids: source_ids.add(grid["properties"]["source_id"]) assert sorted(source_ids) == ["nz_linz"] @pytest.mark.network def test_get_transform_grid_list__file(): grids = get_transform_grid_list( filename="us_noaa_alaska", include_already_downloaded=True ) assert len(grids) == 1 assert grids[0]["properties"]["name"] == "us_noaa_alaska.tif" @pytest.mark.network def test_get_transform_grid_list__area_of_use(): grids = get_transform_grid_list(area_of_use="USA", include_already_downloaded=True) assert len(grids) > 10 for grid in grids: assert "USA" in grid["properties"]["area_of_use"] def test_sha256sum(tmp_path): test_file = tmp_path / "test.file" test_file.write_text("TEST") assert ( _sha256sum(test_file) == "94ee059335e587e501cc4bf90613e0814f00a7b08bc7c648fd865a2af6a22cc2" ) @patch("pyproj.sync.urlretrieve", autospec=True) @pytest.mark.parametrize("verbose", [True, False]) def test_download_resource_file(urlretrieve_mock, verbose, tmp_path, capsys): def dummy_urlretrieve(url, local_path): with open(local_path, "w") as testf: testf.write("TEST") urlretrieve_mock.side_effect = dummy_urlretrieve _download_resource_file( file_url="test_url", short_name="test_file.txt", directory=tmp_path, verbose=verbose, sha256="94ee059335e587e501cc4bf90613e0814f00a7b08bc7c648fd865a2af6a22cc2", ) urlretrieve_mock.assert_called_with("test_url", tmp_path / "test_file.txt.part") captured = capsys.readouterr() if not verbose: assert captured.out == "" else: assert captured.out == "Downloading: test_url\n" expected_file = tmp_path / "test_file.txt" assert expected_file.exists() assert ( _sha256sum(expected_file) == "94ee059335e587e501cc4bf90613e0814f00a7b08bc7c648fd865a2af6a22cc2" ) @patch("pyproj.sync.urlretrieve", autospec=True) def test_download_resource_file__nosha256(urlretrieve_mock, tmp_path): def dummy_urlretrieve(url, local_path): local_path.touch() urlretrieve_mock.side_effect = dummy_urlretrieve _download_resource_file( file_url="test_url", short_name="test_file.txt", directory=tmp_path ) urlretrieve_mock.assert_called_with("test_url", tmp_path / "test_file.txt.part") expected_file = tmp_path / "test_file.txt" assert expected_file.exists() @patch("pyproj.sync.urlretrieve", autospec=True) def test_download_resource_file__exception(urlretrieve_mock, tmp_path): def dummy_urlretrieve(url, local_path): local_path.touch() raise URLError("Test") urlretrieve_mock.side_effect = dummy_urlretrieve with pytest.raises(URLError): _download_resource_file( file_url="test_url", short_name="test_file.txt", directory=str(tmp_path), 
verbose=False, sha256="test", ) urlretrieve_mock.assert_called_with("test_url", tmp_path / "test_file.txt.part") assert not tmp_path.joinpath("test_file.txt.part").exists() assert not tmp_path.joinpath("test_file.txt").exists() @patch("pyproj.sync.urlretrieve", autospec=True) def test_download_resource_file__bad_sha256sum(urlretrieve_mock, tmp_path): def dummy_urlretrieve(url, local_path): local_path.touch() urlretrieve_mock.side_effect = dummy_urlretrieve with pytest.raises(RuntimeError, match="SHA256 mismatch: test_file.txt"): _download_resource_file( file_url="test_url", short_name="test_file.txt", directory=tmp_path, verbose=False, sha256="test", ) urlretrieve_mock.assert_called_with("test_url", tmp_path / "test_file.txt.part") assert not tmp_path.joinpath("test_file.txt.part").exists() assert not tmp_path.joinpath("test_file.txt").exists() @pytest.mark.network @patch("pyproj.sync.Path.stat") def test__load_grid_geojson_old_file(stat_mock, tmp_path): return_timestamp = MagicMock() return_timestamp.st_mtime = (datetime.now() - timedelta(days=2)).timestamp() stat_mock.return_value = return_timestamp tmp_path.joinpath("files.geojson").touch() grids = _load_grid_geojson(target_directory=tmp_path) assert sorted(grids) == ["features", "name", "type"] pyproj-3.7.1/test/test_transform.py000066400000000000000000000125201475425760300174650ustar00rootroot00000000000000import numpy import pytest from numpy.testing import assert_allclose, assert_almost_equal from pyproj import Proj, __proj_version__, transform from test.conftest import grids_available def test_transform(): # convert awips221 grid to awips218 coordinate system # (grids defined at http://www.nco.ncep.noaa.gov/pmb/docs/on388/tableb.html) nx = 614 ny = 428 dx = 12190.58 dy = dx awips221 = Proj(proj="lcc", R=6371200, lat_1=50, lat_2=50, lon_0=-107) print("proj4 library version = ", __proj_version__) llcrnrx, llcrnry = awips221(-145.5, 1) awips221 = Proj( proj="lcc", R=6371200, lat_1=50, lat_2=50, lon_0=-107, x_0=-llcrnrx, y_0=-llcrnry, ) assert_allclose(awips221(-145.5, 1), (0, 0), atol=1e-4) awips218 = Proj(proj="lcc", R=6371200, lat_1=25, lat_2=25, lon_0=-95) llcrnrx, llcrnry = awips218(-133.459, 12.19) awips218 = Proj( proj="lcc", R=6371200, lat_1=25, lat_2=25, lon_0=-95, x_0=-llcrnrx, y_0=-llcrnry ) assert_allclose(awips218(-133.459, 12.19), (0, 0), atol=1e-4) x1 = dx * numpy.indices((ny, nx), "f")[1, :, :] y1 = dy * numpy.indices((ny, nx), "f")[0, :, :] print("max/min x and y for awips218 grid") print(numpy.minimum.reduce(numpy.ravel(x1)), numpy.maximum.reduce(numpy.ravel(x1))) print(numpy.minimum.reduce(numpy.ravel(y1)), numpy.maximum.reduce(numpy.ravel(y1))) with pytest.warns(FutureWarning): x2, y2 = transform(awips218, awips221, x1, y1) print("max/min x and y for awips218 grid in awips221 coordinates") print(numpy.minimum.reduce(numpy.ravel(x2)), numpy.maximum.reduce(numpy.ravel(x2))) print(numpy.minimum.reduce(numpy.ravel(y2)), numpy.maximum.reduce(numpy.ravel(y2))) with pytest.warns(FutureWarning): x3, y3 = transform(awips221, awips218, x2, y2) print("error for reverse transformation back to awips218 coords") print("(should be close to zero)") assert_allclose(numpy.minimum.reduce(numpy.ravel(x3 - x1)), 0, atol=1e-4) assert_allclose(numpy.maximum.reduce(numpy.ravel(x3 - x1)), 0, atol=1e-4) assert_allclose(numpy.minimum.reduce(numpy.ravel(y3 - y1)), 0, atol=1e-4) assert_allclose(numpy.maximum.reduce(numpy.ravel(y3 - y1)), 0, atol=1e-4) def test_transform_single_point_nad83_to_nad27(): # projection 1: UTM zone 15, grs80 ellipse, 
NAD83 datum # (defined by epsg code 26915) p1 = Proj("epsg:26915", preserve_units=False) # projection 2: UTM zone 15, clrk66 ellipse, NAD27 datum p2 = Proj("epsg:26715", preserve_units=False) # find x,y of Jefferson City, MO. x1, y1 = p1(-92.199881, 38.56694) # transform this point to projection 2 coordinates. x2, y2 = transform(p1, p2, x1, y1) assert_almost_equal( (x1, y1), (569704.566, 4269024.671), decimal=3, ) expected_xy2 = (569720, 4268813) if ( grids_available( "us_noaa_nadcon5_nad27_nad83_1986_conus.tif", check_network=False ) or grids_available() ): expected_xy2 = (569722, 4268814) elif grids_available( "ca_nrc_ntv2_0.tif", "ca_nrc_ntv1_can.tif", check_network=False ): expected_xy2 = (569706, 4268817) elif grids_available("us_noaa_conus.tif", check_network=False): expected_xy2 = (569722, 4268814) assert_almost_equal( (x2, y2), expected_xy2, decimal=0, ) assert_almost_equal( p2(x2, y2, inverse=True, errcheck=True), (-92.200, 38.567), decimal=3, ) def test_transform_tuple_nad83_to_nad27(): # projection 1: UTM zone 15, grs80 ellipse, NAD83 datum # (defined by epsg code 26915) p1 = Proj("epsg:26915", preserve_units=False) # projection 2: UTM zone 15, clrk66 ellipse, NAD27 datum p2 = Proj("epsg:26715", preserve_units=False) # process 3 points at a time in a tuple lats = (38.83, 39.32, 38.75) # Columbia, KC and StL Missouri lons = (-92.22, -94.72, -90.37) x1, y1 = p1(lons, lats) x2, y2 = transform(p1, p2, x1, y1) assert_almost_equal( x1, (567703.344, 351730.944, 728553.093), decimal=3, ) assert_almost_equal( y1, (4298200.739, 4353698.725, 4292319.005), decimal=3, ) expected_x2 = (567719, 351748, 728568) expected_y2 = (4297989, 4353487, 4292108) if ( grids_available( "us_noaa_nadcon5_nad27_nad83_1986_conus.tif", check_network=False ) or grids_available() ): expected_x2 = (567721, 351747, 728569) expected_y2 = (4297989, 4353489, 4292106) elif grids_available( "ca_nrc_ntv2_0.tif", "ca_nrc_ntv1_can.tif", check_network=False ): expected_x2 = (567705, 351727, 728558) expected_y2 = (4297993, 4353490, 4292111) elif grids_available("us_noaa_conus.tif", check_network=False): expected_x2 = (567721, 351747, 728569.133) expected_y2 = (4297989, 4353489, 4292106) assert_almost_equal( x2, expected_x2, decimal=0, ) assert_almost_equal( y2, expected_y2, decimal=0, ) lons2, lats2 = p2(x2, y2, inverse=True, errcheck=True) assert_almost_equal( lons2, (-92.220, -94.720, -90.370), decimal=3, ) assert_almost_equal( lats2, (38.830, 39.320, 38.750), decimal=3, ) pyproj-3.7.1/test/test_transformer.py000066400000000000000000001546711475425760300200320ustar00rootroot00000000000000import concurrent.futures import os import pickle from array import array from functools import partial from glob import glob from itertools import permutations from pathlib import Path from unittest.mock import call, patch import numpy import pytest from numpy.testing import assert_almost_equal, assert_array_equal import pyproj from pyproj import CRS, Proj, Transformer, itransform, transform from pyproj.datadir import append_data_dir from pyproj.enums import TransformDirection from pyproj.exceptions import ProjError from pyproj.transformer import AreaOfInterest, TransformerGroup from test.conftest import PROJ_GTE_93, grids_available, proj_env, proj_network_env def test_tranform_wgs84_to_custom(): custom_proj = pyproj.Proj( "+proj=geos +lon_0=0.000000 +lat_0=0 +h=35807.414063" " +a=6378.169000 +b=6356.583984" ) wgs84 = pyproj.Proj("+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs") lat, lon = 51.04715, 3.23406 with 
pytest.warns(FutureWarning): xx, yy = pyproj.transform(wgs84, custom_proj, lon, lat) assert f"{xx:.3f} {yy:.3f}" == "212.623 4604.975" @pytest.mark.grid def test_transform_wgs84_to_alaska(): with pytest.warns(FutureWarning): lat_lon_proj = pyproj.Proj(init="epsg:4326", preserve_units=False) alaska_aea_proj = pyproj.Proj(init="epsg:2964", preserve_units=False) test = (-179.72638, 49.752533) with pytest.warns(FutureWarning): xx, yy = pyproj.transform(lat_lon_proj, alaska_aea_proj, *test) if grids_available("us_noaa_alaska.tif"): assert f"{xx:.3f} {yy:.3f}" == "-1824924.495 330822.800" else: assert f"{xx:.3f} {yy:.3f}" == "-1825155.697 330730.391" @pytest.mark.skip(reason="https://github.com/OSGeo/PROJ/issues/2425") def test_illegal_transformation(): # issue 202 with pytest.warns(FutureWarning): p1 = pyproj.Proj(init="epsg:4326") p2 = pyproj.Proj(init="epsg:3857") with pytest.warns(FutureWarning): xx, yy = pyproj.transform( p1, p2, (-180, -180, 180, 180, -180), (-90, 90, 90, -90, -90) ) assert numpy.all(numpy.isinf(xx)) assert numpy.all(numpy.isinf(yy)) with pytest.warns(FutureWarning), pytest.raises(ProjError): pyproj.transform( p1, p2, (-180, -180, 180, 180, -180), (-90, 90, 90, -90, -90), errcheck=True ) def test_lambert_conformal_transform(): # issue 207 with pytest.warns(FutureWarning): Midelt = pyproj.Proj(init="epsg:26191") WGS84 = pyproj.Proj(init="epsg:4326") E = 567623.931 N = 256422.787 h = 1341.467 with pytest.warns(FutureWarning): Long1, Lat1, H1 = pyproj.transform(Midelt, WGS84, E, N, h, radians=False) assert_almost_equal((Long1, Lat1, H1), (-4.6753456, 32.902199, 1341.467), decimal=5) def test_4d_transform(scalar_and_array): transformer = Transformer.from_pipeline("+init=ITRF2008:ITRF2000") assert_almost_equal( transformer.transform( xx=scalar_and_array(3513638.19380), yy=scalar_and_array(778956.45250), zz=scalar_and_array(5248216.46900), tt=scalar_and_array(2008.75), ), ( scalar_and_array(3513638.1999428216), scalar_and_array(778956.4532640711), scalar_and_array(5248216.453456361), scalar_and_array(2008.75), ), ) def test_2d_with_time_transform(scalar_and_array): transformer = Transformer.from_pipeline("+init=ITRF2008:ITRF2000") assert_almost_equal( transformer.transform( xx=scalar_and_array(3513638.19380), yy=scalar_and_array(778956.45250), tt=scalar_and_array(2008.75), ), ( scalar_and_array(3513638.1999428216), scalar_and_array(778956.4532640711), scalar_and_array(2008.75), ), ) def test_4d_transform_crs_obs1(scalar_and_array): transformer = Transformer.from_proj(7789, 8401) assert_almost_equal( transformer.transform( xx=scalar_and_array(3496737.2679), yy=scalar_and_array(743254.4507), zz=scalar_and_array(5264462.9620), tt=scalar_and_array(2019.0), ), ( scalar_and_array(3496737.757717311), scalar_and_array(743253.9940103051), scalar_and_array(5264462.701132784), scalar_and_array(2019.0), ), ) def test_4d_transform_orginal_crs_obs1(): with pytest.warns(FutureWarning): assert_almost_equal( transform( 7789, 8401, x=3496737.2679, y=743254.4507, z=5264462.9620, tt=2019.0 ), (3496737.757717311, 743253.9940103051, 5264462.701132784, 2019.0), ) def test_4d_transform_crs_obs2(scalar_and_array): transformer = Transformer.from_proj(4896, 7930) assert_almost_equal( transformer.transform( xx=scalar_and_array(3496737.2679), yy=scalar_and_array(743254.4507), zz=scalar_and_array(5264462.9620), tt=scalar_and_array(2019.0), ), ( scalar_and_array(3496737.7857162016), scalar_and_array(743254.0394113371), scalar_and_array(5264462.643659916), scalar_and_array(2019.0), ), ) def 
test_2d_with_time_transform_crs_obs2(scalar_and_array): transformer = Transformer.from_proj(4896, 7930) assert_almost_equal( transformer.transform( xx=scalar_and_array(3496737.2679), yy=scalar_and_array(743254.4507), tt=scalar_and_array(2019.0), ), ( scalar_and_array(3496737.4105305015), scalar_and_array(743254.1014318303), scalar_and_array(2019.0), ), ) def test_2d_with_time_transform_original_crs_obs2(): with pytest.warns(FutureWarning): assert_almost_equal( transform(4896, 7930, x=3496737.2679, y=743254.4507, tt=2019.0), (3496737.4105305015, 743254.1014318303, 2019.0), ) def test_4d_itransform(): transformer = Transformer.from_pipeline("+init=ITRF2008:ITRF2000") assert_almost_equal( list( transformer.itransform( [(3513638.19380, 778956.45250, 5248216.46900, 2008.75)] ) ), [(3513638.1999428216, 778956.4532640711, 5248216.453456361, 2008.75)], ) def test_3d_time_itransform(): transformer = Transformer.from_pipeline("+init=ITRF2008:ITRF2000") assert_almost_equal( list( transformer.itransform( [(3513638.19380, 778956.45250, 2008.75)], time_3rd=True ) ), [(3513638.1999428216, 778956.4532640711, 2008.75)], ) def test_4d_itransform_orginal_crs_obs1(): with pytest.warns(FutureWarning): assert_almost_equal( list( itransform( 7789, 8401, [(3496737.2679, 743254.4507, 5264462.9620, 2019.0)] ) ), [(3496737.757717311, 743253.9940103051, 5264462.701132784, 2019.0)], ) def test_2d_with_time_itransform_original_crs_obs2(): with pytest.warns(FutureWarning): assert_almost_equal( list( itransform( 4896, 7930, [(3496737.2679, 743254.4507, 2019.0)], time_3rd=True ) ), [(3496737.4105305015, 743254.1014318303, 2019.0)], ) def test_itransform_time_3rd_invalid(): with ( pytest.warns(FutureWarning), pytest.raises(ValueError, match="'time_3rd' is only valid for 3 coordinates."), ): list( itransform( 7789, 8401, [(3496737.2679, 743254.4507, 5264462.9620, 2019.0)], time_3rd=True, ) ) with ( pytest.warns(FutureWarning), pytest.raises(ValueError, match="'time_3rd' is only valid for 3 coordinates."), ): list(itransform(7789, 8401, [(3496737.2679, 743254.4507)], time_3rd=True)) def test_transform_no_error(): with pytest.warns(FutureWarning): pj = Proj(init="epsg:4555") pjx, pjy = pj(116.366, 39.867) with pytest.warns(FutureWarning): transform(pj, Proj(4326), pjx, pjy, radians=True, errcheck=True) def test_itransform_no_error(): with pytest.warns(FutureWarning): pj = Proj(init="epsg:4555") pjx, pjy = pj(116.366, 39.867) with pytest.warns(FutureWarning): list(itransform(pj, Proj(4326), [(pjx, pjy)], radians=True, errcheck=True)) def test_transform_no_exception(): # issue 249 with pytest.warns(FutureWarning): transformer = Transformer.from_proj("+init=epsg:4326", "+init=epsg:27700") transformer.transform(1.716073972, 52.658007833, errcheck=True) transformer.itransform([(1.716073972, 52.658007833)], errcheck=True) def test_transform__out_of_bounds(scalar_and_array): with pytest.warns(FutureWarning): transformer = Transformer.from_proj("+init=epsg:4326", "+init=epsg:27700") with pytest.raises(pyproj.exceptions.ProjError): transformer.transform( scalar_and_array(100000), scalar_and_array(100000), errcheck=True ) def test_transform_radians(): with pytest.warns(FutureWarning): WGS84 = pyproj.Proj("+init=EPSG:4326") ECEF = pyproj.Proj(proj="geocent", ellps="WGS84", datum="WGS84") with pytest.warns(FutureWarning): assert_almost_equal( pyproj.transform( ECEF, WGS84, -2704026.010, -4253051.810, 3895878.820, radians=True ), (-2.137113493845668, 0.6613203738996222, -20.531156923621893), ) assert_almost_equal( pyproj.transform( WGS84, 
ECEF, -2.137113493845668, 0.6613203738996222, -20.531156923621893, radians=True, ), (-2704026.010, -4253051.810, 3895878.820), ) def test_itransform_radians(): with pytest.warns(FutureWarning): WGS84 = pyproj.Proj("+init=EPSG:4326") ECEF = pyproj.Proj(proj="geocent", ellps="WGS84", datum="WGS84") with pytest.warns(FutureWarning): assert_almost_equal( list( pyproj.itransform( ECEF, WGS84, [(-2704026.010, -4253051.810, 3895878.820)], radians=True, ) ), [(-2.137113493845668, 0.6613203738996222, -20.531156923621893)], ) assert_almost_equal( list( pyproj.itransform( WGS84, ECEF, [(-2.137113493845668, 0.6613203738996222, -20.531156923621893)], radians=True, ) ), [(-2704026.010, -4253051.810, 3895878.820)], ) def test_4d_transform__inverse(scalar_and_array): transformer = Transformer.from_pipeline("+init=ITRF2008:ITRF2000") assert_almost_equal( transformer.transform( xx=scalar_and_array(3513638.1999428216), yy=scalar_and_array(778956.4532640711), zz=scalar_and_array(5248216.453456361), tt=scalar_and_array(2008.75), direction=TransformDirection.INVERSE, ), ( scalar_and_array(3513638.19380), scalar_and_array(778956.45250), scalar_and_array(5248216.46900), scalar_and_array(2008.75), ), ) def test_transform_direction(scalar_and_array): forward_transformer = Transformer.from_crs(4326, 3857) inverse_transformer = Transformer.from_crs(3857, 4326) assert_array_equal( inverse_transformer.transform( scalar_and_array(-33), scalar_and_array(24), direction=TransformDirection.INVERSE, ), forward_transformer.transform(scalar_and_array(-33), scalar_and_array(24)), ) ident_transformer = Transformer.from_crs(4326, 3857) assert_array_equal( ident_transformer.transform( scalar_and_array(-33), scalar_and_array(24), direction=TransformDirection.IDENT, ), (scalar_and_array(-33), scalar_and_array(24)), ) def test_always_xy__transformer(scalar_and_array): transformer = Transformer.from_crs(2193, 4326, always_xy=True) assert_almost_equal( transformer.transform(scalar_and_array(1625350), scalar_and_array(5504853)), ( scalar_and_array(173.29964730317386), scalar_and_array(-40.60674802693758), ), ) def test_always_xy__transform(): with pytest.warns(FutureWarning): assert_almost_equal( transform(2193, 4326, 1625350, 5504853, always_xy=True), (173.29964730317386, -40.60674802693758), ) def test_always_xy__itransform(): with pytest.warns(FutureWarning): assert_almost_equal( list(itransform(2193, 4326, [(1625350, 5504853)], always_xy=True)), [(173.29964730317386, -40.60674802693758)], ) @pytest.mark.parametrize("empty_array", [(), [], numpy.array([])]) def test_transform_empty_array_xy(empty_array): transformer = Transformer.from_crs(2193, 4326) assert_array_equal( transformer.transform(empty_array, empty_array), (empty_array, empty_array) ) @pytest.mark.parametrize("empty_array", [(), [], numpy.array([])]) def test_transform_empty_array_xyzt(empty_array): transformer = Transformer.from_pipeline("+init=ITRF2008:ITRF2000") assert_array_equal( transformer.transform(empty_array, empty_array, empty_array, empty_array), (empty_array, empty_array, empty_array, empty_array), ) def test_transform_direction__string(scalar_and_array): forward_transformer = Transformer.from_crs(4326, 3857) inverse_transformer = Transformer.from_crs(3857, 4326) assert_array_equal( inverse_transformer.transform( scalar_and_array(-33), scalar_and_array(24), direction="INVERSE" ), forward_transformer.transform( scalar_and_array(-33), scalar_and_array(24), direction="FORWARD" ), ) ident_transformer = Transformer.from_crs(4326, 3857) assert_array_equal( 
ident_transformer.transform( scalar_and_array(-33), scalar_and_array(24), direction="IDENT" ), (scalar_and_array(-33), scalar_and_array(24)), ) def test_transform_direction__string_lowercase(scalar_and_array): forward_transformer = Transformer.from_crs(4326, 3857) inverse_transformer = Transformer.from_crs(3857, 4326) assert_array_equal( inverse_transformer.transform( scalar_and_array(-33), scalar_and_array(24), direction="inverse" ), forward_transformer.transform( scalar_and_array(-33), scalar_and_array(24), direction="forward" ), ) ident_transformer = Transformer.from_crs(4326, 3857) assert_array_equal( ident_transformer.transform( scalar_and_array(-33), scalar_and_array(24), direction="ident" ), (scalar_and_array(-33), scalar_and_array(24)), ) def test_transform_direction__invalid(scalar_and_array): transformer = Transformer.from_crs(4326, 3857) with pytest.raises(ValueError, match="Invalid value"): transformer.transform( scalar_and_array(-33), scalar_and_array(24), direction="WHEREVER" ) def test_from_pipeline__non_transform_input(): with pytest.raises(ProjError, match="Input is not a transformation"): Transformer.from_pipeline("epsg:4326") def test_non_supported_initialization(): with pytest.raises(ProjError, match="Transformer must be initialized using"): Transformer() def test_pj_info_properties(): transformer = Transformer.from_crs(4326, 3857) assert transformer.name == "pipeline" assert transformer.description == "Popular Visualisation Pseudo-Mercator" assert transformer.definition.startswith("proj=pipeline") assert transformer.has_inverse assert transformer.accuracy == 0 def test_to_proj4(): transformer = Transformer.from_crs(4326, 3857) proj_string = transformer.to_proj4() assert "+proj=pipeline" in proj_string assert "\n" not in proj_string def test_to_proj4__pretty(): transformer = Transformer.from_crs(4326, 3857) proj_string = transformer.to_proj4(pretty=True) assert "+proj=pipeline" in proj_string assert "\n" in proj_string def test_to_wkt(): transformer = Transformer.from_crs(4326, 3857) assert transformer.to_wkt().startswith( 'CONVERSION["Popular Visualisation Pseudo-Mercator"' ) def test_str(): assert str(Transformer.from_crs(4326, 3857)).startswith("proj=pipeline") def test_repr(): assert repr(Transformer.from_crs(4326, 3857)) == ( "\n" "Description: Popular Visualisation Pseudo-Mercator\n" "Area of Use:\n" "- name: World.\n" "- bounds: (-180.0, -90.0, 180.0, 90.0)" ) @pytest.mark.grid def test_repr__conditional(): trans_repr = repr(Transformer.from_crs("EPSG:4326+3855", "EPSG:4979")) if grids_available("us_nga_egm08_25.tif"): assert trans_repr == ( "\n" "Description: unavailable until proj_trans is called\n" "Area of Use:\n- undefined" ) elif not PROJ_GTE_93: assert trans_repr == ( "\n" "Description: Transformation from EGM2008 height to WGS 84 " "(ballpark vertical transformation, without ellipsoid height " "to vertical height correction)\n" "Area of Use:\n- undefined" ) else: assert trans_repr == ( "\n" "Description: Transformation from EGM2008 height to WGS 84 " "(ballpark vertical transformation, without ellipsoid height " "to vertical height correction)\n" "Area of Use:\n" "- name: World.\n" "- bounds: (-180.0, -90.0, 180.0, 90.0)" ) def test_to_json_dict(): transformer = Transformer.from_crs(4326, 3857) json_dict = transformer.to_json_dict() assert json_dict["type"] == "Conversion" def test_to_json(): transformer = Transformer.from_crs(4326, 3857) json_data = transformer.to_json() assert "Conversion" in json_data assert "\n" not in json_data def 
test_to_json__pretty(): transformer = Transformer.from_crs(4326, 3857) json_data = transformer.to_json(pretty=True) assert "Conversion" in json_data assert json_data.startswith('{\n "') def test_to_json__pretty__indenation(): transformer = Transformer.from_crs(4326, 3857) json_data = transformer.to_json(pretty=True, indentation=4) assert "Conversion" in json_data assert json_data.startswith('{\n "') def test_transformer__operations(): transformer = TransformerGroup(28356, 7856).transformers[0] assert [op.name for op in transformer.operations] == [ "Inverse of Map Grid of Australia zone 56", "GDA94 to GDA2020 (1)", "Map Grid of Australia zone 56", ] def test_transformer__operations_missing(): assert Transformer.from_crs(7789, 8401).operations == () def test_transformer__operations__scope_remarks(): transformer = TransformerGroup(28356, 7856).transformers[0] assert transformer.scope is None assert [op.scope for op in transformer.operations] == [ "Engineering survey, topographic mapping.", "Transformation of GDA94 coordinates that have been derived " "through GNSS CORS.", "Engineering survey, topographic mapping.", ] assert [str(op.remarks)[:5].strip() for op in transformer.operations] == [ "Grid", "Scale", "Grid", ] @pytest.mark.grid def test_transformer__only_best(): transformer = Transformer.from_crs(4326, 2964, only_best=True) if not grids_available("ca_nrc_ntv2_0.tif"): with pytest.raises( ProjError, match="Grid ca_nrc_ntv2_0.tif is not available.", ): transformer.transform(60, -100, errcheck=True) def test_transformer_group(): trans_group = TransformerGroup(7789, 8401) assert len(trans_group.transformers) == 2 assert trans_group.transformers[0].name == "helmert" assert trans_group.transformers[1].description == ("ITRF2014 to ETRF2014 (1)") assert not trans_group.unavailable_operations assert trans_group.best_available @pytest.mark.grid def test_transformer_group__unavailable(): trans_group = TransformerGroup(4326, 2964) for transformer in trans_group.transformers: assert transformer.is_network_enabled == ( os.environ.get("PROJ_NETWORK") == "ON" ) if grids_available("us_noaa_alaska.tif", "ca_nrc_ntv2_0.tif", check_all=True): assert len(trans_group.unavailable_operations) == 0 assert len(trans_group.transformers) == 10 assert ( trans_group.transformers[0].description == "Inverse of NAD27 to WGS 84 (85) + Alaska Albers" ) assert trans_group.best_available elif grids_available("us_noaa_alaska.tif"): assert len(trans_group.unavailable_operations) == 1 assert ( trans_group.transformers[0].description == "Inverse of NAD27 to WGS 84 (85) + Alaska Albers" ) assert len(trans_group.transformers) == 9 assert trans_group.best_available elif grids_available("ca_nrc_ntv2_0.tif"): assert len(trans_group.unavailable_operations) == 1 assert ( trans_group.transformers[0].description == "Inverse of NAD27 to WGS 84 (7) + Alaska Albers" ) assert len(trans_group.transformers) == 9 assert not trans_group.best_available else: assert len(trans_group.unavailable_operations) == 2 assert ( trans_group.unavailable_operations[0].name == "Inverse of NAD27 to WGS 84 (85) + Alaska Albers" ) assert len(trans_group.transformers) == 8 assert not trans_group.best_available @pytest.mark.grid def test_transform_group__missing_best(): with pytest.warns(FutureWarning): lat_lon_proj = pyproj.Proj(init="epsg:4326", preserve_units=False) alaska_aea_proj = pyproj.Proj(init="epsg:2964", preserve_units=False) if not grids_available("ca_nrc_ntv2_0.tif"): with pytest.warns( UserWarning, match="Best transformation is not available due 
to missing Grid", ): trans_group = pyproj.transformer.TransformerGroup( lat_lon_proj.crs, alaska_aea_proj.crs ) assert not trans_group.best_available assert "ntv2_0" not in trans_group.transformers[0].definition assert "ntv2_0" in trans_group.unavailable_operations[0].to_proj4() else: # assuming all grids available or PROJ_NETWORK=ON trans_group = pyproj.transformer.TransformerGroup( lat_lon_proj.crs, alaska_aea_proj.crs ) assert trans_group.best_available assert "ntv2_0" in trans_group.transformers[0].definition @pytest.mark.grid def test_transform_group__area_of_interest(): def get_transformer_group(): return pyproj.transformer.TransformerGroup( 4326, 2964, area_of_interest=pyproj.transformer.AreaOfInterest( -136.46, 49.0, -60.72, 83.17 ), ) if not grids_available("ca_nrc_ntv2_0.tif"): with pytest.warns( UserWarning, match="Best transformation is not available due to missing Grid", ): trans_group = get_transformer_group() assert ( trans_group.transformers[0].description == "Inverse of NAD27 to WGS 84 (13) + Alaska Albers" ) else: trans_group = get_transformer_group() assert trans_group.best_available assert ( trans_group.transformers[0].description == "Inverse of NAD27 to WGS 84 (33) + Alaska Albers" ) @pytest.mark.grid def test_transformer_group__get_transform_crs(): tg = TransformerGroup("epsg:4258", "epsg:7415") if grids_available( "nl_nsgi_nlgeo2018.tif", "nl_nsgi_rdtrans2018.tif", check_all=True ): assert len(tg.transformers) == 2 else: assert len(tg.transformers) == 1 def test_transformer__area_of_interest(): transformer = Transformer.from_crs( "EPSG:7789", "EPSG:4936", area_of_interest=AreaOfInterest(-177.25, -44.64, -175.54, -43.3), ) assert ( transformer.description == "Ballpark geocentric translation from ITRF2014 to ETRS89" ) def test_transformer_proj__area_of_interest(): transformer = Transformer.from_proj( "EPSG:7789", "EPSG:4936", area_of_interest=AreaOfInterest(-177.25, -44.64, -175.54, -43.3), ) assert ( transformer.description == "Ballpark geocentric translation from ITRF2014 to ETRS89" ) def test_transformer__area_of_interest__invalid(): with pytest.raises(ProjError): Transformer.from_crs( 4326, 2964, area_of_interest=(-136.46, 49.0, -60.72, 83.17) ) def test_transformer_group__area_of_interest__invalid(): with pytest.raises(ProjError): TransformerGroup(4326, 2964, area_of_interest=(-136.46, 49.0, -60.72, 83.17)) def test_transformer_equals(): assert ( TransformerGroup(28356, 7856).transformers[0] == TransformerGroup(28356, 7856).transformers[0] ) @pytest.mark.parametrize( "comparison", [Transformer.from_pipeline("+proj=pipeline +ellps=GRS80 +step +proj=cart"), 22], ) def test_transformer_not_equals(comparison): assert Transformer.from_crs(28356, 7856) != comparison @pytest.mark.parametrize( "pipeline_str", [ "+proj=pipeline +ellps=GRS80 +step +proj=cart", "+proj=pipeline +step +proj=unitconvert +xy_in=deg " "+xy_out=rad +ellps=GRS80 +step +proj=cart", ], ) def test_pipeline_transform(pipeline_str): trans = Transformer.from_pipeline(pipeline_str) assert_almost_equal( trans.transform(50, 25, 0), (3717892.6072086394, 4430811.87152035, 2679074.4628772778), ) @pytest.mark.parametrize( "pipeline_str", [ "+proj=pipeline +ellps=GRS80 +step +proj=cart", "+proj=pipeline +step +proj=unitconvert +xy_in=deg " "+xy_out=rad +ellps=GRS80 +step +proj=cart", ], ) def test_pipeline_itransform(pipeline_str): trans = Transformer.from_pipeline(pipeline_str) assert_almost_equal( list(trans.itransform([(50, 25, 0)])), [(3717892.6072086394, 4430811.87152035, 2679074.4628772778)], ) 
@pytest.mark.parametrize( "transformer", [ partial( Transformer.from_pipeline, "+proj=pipeline +ellps=GRS80 +step +proj=cart" ), partial(Transformer.from_crs, 4326, 3857), partial(Transformer.from_proj, 4326, 3857), ], ) @patch.dict("os.environ", {"PROJ_NETWORK": "ON"}, clear=True) def test_network__disable(transformer): with proj_network_env(): pyproj.network.set_network_enabled(active=False) trans = transformer() assert trans.is_network_enabled is False @pytest.mark.parametrize( "transformer", [ partial( Transformer.from_pipeline, "+proj=pipeline +ellps=GRS80 +step +proj=cart" ), partial(Transformer.from_crs, 4326, 3857), partial(Transformer.from_proj, 4326, 3857), ], ) @patch.dict("os.environ", {"PROJ_NETWORK": "OFF"}, clear=True) def test_network__enable(transformer): with proj_network_env(): pyproj.network.set_network_enabled(active=True) trans = transformer() assert trans.is_network_enabled is True @pytest.mark.parametrize( "transformer", [ partial( Transformer.from_pipeline, "+proj=pipeline +ellps=GRS80 +step +proj=cart" ), partial(Transformer.from_crs, 4326, 3857), partial(Transformer.from_proj, 4326, 3857), ], ) def test_network__default(transformer): with proj_network_env(): pyproj.network.set_network_enabled() trans = transformer() assert trans.is_network_enabled == (os.environ.get("PROJ_NETWORK") == "ON") @patch.dict("os.environ", {"PROJ_NETWORK": "OFF"}, clear=True) def test_transformer_group__network_enabled(): with proj_network_env(): pyproj.network.set_network_enabled(active=True) trans_group = TransformerGroup(4326, 2964) assert len(trans_group.unavailable_operations) == 0 assert len(trans_group.transformers) == 10 assert trans_group.best_available for transformer in trans_group.transformers: assert transformer.is_network_enabled is True for operation in transformer.operations: for grid in operation.grids: assert grid.available @pytest.mark.grid @patch.dict("os.environ", {"PROJ_NETWORK": "ON"}, clear=True) def test_transformer_group__network_disabled(): with proj_network_env(): pyproj.network.set_network_enabled(active=False) trans_group = TransformerGroup(4326, 2964) for transformer in trans_group.transformers: assert transformer.is_network_enabled is False if grids_available( "us_noaa_alaska.tif", "ca_nrc_ntv2_0.tif", check_network=False, check_all=True, ): assert len(trans_group.unavailable_operations) == 0 assert len(trans_group.transformers) == 10 assert ( trans_group.transformers[0].description == "Inverse of NAD27 to WGS 84 (85) + Alaska Albers" ) assert trans_group.best_available elif grids_available("us_noaa_alaska.tif", check_network=False): assert len(trans_group.unavailable_operations) == 1 assert ( trans_group.transformers[0].description == "Inverse of NAD27 to WGS 84 (85) + Alaska Albers" ) assert len(trans_group.transformers) == 9 assert trans_group.best_available elif grids_available("ca_nrc_ntv2_0.tif", check_network=False): assert len(trans_group.unavailable_operations) == 1 assert ( trans_group.transformers[0].description == "Inverse of NAD27 to WGS 84 (7) + Alaska Albers" ) assert len(trans_group.transformers) == 9 assert not trans_group.best_available else: assert len(trans_group.unavailable_operations) == 2 assert ( trans_group.unavailable_operations[0].name == "Inverse of NAD27 to WGS 84 (85) + Alaska Albers" ) assert len(trans_group.transformers) == 8 assert not trans_group.best_available def test_transform_pipeline_radians(): trans = Transformer.from_pipeline( "+proj=pipeline +step +inv +proj=cart +ellps=WGS84 " "+step +proj=unitconvert +xy_in=rad 
+xy_out=deg" ) assert_almost_equal( trans.transform(-2704026.010, -4253051.810, 3895878.820, radians=True), (-2.137113493845668, 0.6613203738996222, -20.531156923621893), ) assert_almost_equal( trans.transform( -2.137113493845668, 0.6613203738996222, -20.531156923621893, radians=True, direction=TransformDirection.INVERSE, ), (-2704026.010, -4253051.810, 3895878.820), ) def test_itransform_pipeline_radians(): trans = Transformer.from_pipeline( "+proj=pipeline +step +inv +proj=cart +ellps=WGS84 " "+step +proj=unitconvert +xy_in=rad +xy_out=deg" ) assert_almost_equal( list( trans.itransform([(-2704026.010, -4253051.810, 3895878.820)], radians=True) ), [(-2.137113493845668, 0.6613203738996222, -20.531156923621893)], ) assert_almost_equal( list( trans.itransform( [(-2.137113493845668, 0.6613203738996222, -20.531156923621893)], radians=True, direction=TransformDirection.INVERSE, ) ), [(-2704026.010, -4253051.810, 3895878.820)], ) @pytest.mark.parametrize("x,y,z", permutations([10, [10], (10,)])) # 6 test cases def test_transform_honours_input_types(x, y, z): # 622 transformer = Transformer.from_proj(4896, 4896) assert transformer.transform(xx=x, yy=y, zz=z) == (x, y, z) @pytest.mark.grid @pytest.mark.network @patch("pyproj.transformer.get_user_data_dir") def test_transformer_group__download_grids(get_user_data_dir_mock, tmp_path, capsys): get_user_data_dir_mock.return_value = str(tmp_path) with proj_network_env(): pyproj.network.set_network_enabled(active=False) trans_group = TransformerGroup(4326, 2964) trans_group.download_grids(verbose=True) captured = capsys.readouterr() get_user_data_dir_mock.assert_called_with(True) paths = sorted(Path(path).name for path in glob(str(tmp_path.joinpath("*")))) if grids_available( "us_noaa_alaska.tif", "ca_nrc_ntv2_0.tif", check_network=False, check_all=True, ): assert paths == [] assert captured.out == "" elif grids_available("us_noaa_alaska.tif", check_network=False): assert paths == ["ca_nrc_ntv2_0.tif"] assert ( captured.out == "Downloading: https://cdn.proj.org/ca_nrc_ntv2_0.tif\n" ) elif grids_available("ca_nrc_ntv2_0.tif", check_network=False): assert paths == ["us_noaa_alaska.tif"] assert captured.out == ( "Downloading: https://cdn.proj.org/us_noaa_alaska.tif\n" ) else: assert paths == ["ca_nrc_ntv2_0.tif", "us_noaa_alaska.tif"] assert captured.out == ( "Downloading: https://cdn.proj.org/us_noaa_alaska.tif\n" "Downloading: https://cdn.proj.org/ca_nrc_ntv2_0.tif\n" ) # make sure not downloaded again with ( proj_env(), patch("pyproj.transformer._download_resource_file") as download_mock, ): append_data_dir(str(tmp_path)) trans_group = TransformerGroup(4326, 2964) trans_group.download_grids() get_user_data_dir_mock.assert_called_with(True) download_mock.assert_not_called() @pytest.mark.grid @patch("pyproj.transformer._download_resource_file") @patch("pyproj.transformer.get_user_data_dir") def test_transformer_group__download_grids__directory( get_user_data_dir_mock, download_mock, tmp_path, capsys ): with proj_network_env(): pyproj.network.set_network_enabled(active=False) trans_group = TransformerGroup(4326, 2964) trans_group.download_grids(directory=tmp_path) get_user_data_dir_mock.assert_not_called() captured = capsys.readouterr() assert captured.out == "" if grids_available( "us_noaa_alaska.tif", "ca_nrc_ntv2_0.tif", check_network=False, check_all=True, ): download_mock.assert_not_called() elif grids_available("us_noaa_alaska.tif", check_network=False): download_mock.assert_called_with( file_url="https://cdn.proj.org/ca_nrc_ntv2_0.tif", 
short_name="ca_nrc_ntv2_0.tif", directory=tmp_path, verbose=False, ) elif grids_available("ca_nrc_ntv2_0.tif", check_network=False): download_mock.assert_called_with( file_url="https://cdn.proj.org/us_noaa_alaska.tif", short_name="us_noaa_alaska.tif", directory=tmp_path, verbose=False, ) else: download_mock.assert_has_calls( [ call( file_url="https://cdn.proj.org/us_noaa_alaska.tif", short_name="us_noaa_alaska.tif", directory=tmp_path, verbose=False, ), call( file_url="https://cdn.proj.org/ca_nrc_ntv2_0.tif", short_name="ca_nrc_ntv2_0.tif", directory=tmp_path, verbose=False, ), ], any_order=True, ) def test_transformer_multithread__pipeline(): # https://github.com/pyproj4/pyproj/issues/782 trans = Transformer.from_pipeline( "+proj=pipeline +step +inv +proj=cart +ellps=WGS84 " "+step +proj=unitconvert +xy_in=rad +xy_out=deg" ) def transform(num): return trans.transform(-2704026.010, -4253051.810, 3895878.820) with concurrent.futures.ThreadPoolExecutor(max_workers=2) as executor: for result in executor.map(transform, range(10)): pass def test_transformer_multithread__crs(): # https://github.com/pyproj4/pyproj/issues/782 trans = Transformer.from_crs(4326, 3857) def transform(num): return trans.transform(1, 2) with concurrent.futures.ThreadPoolExecutor(max_workers=2) as executor: for result in executor.map(transform, range(10)): pass def test_transformer_accuracy_filter(): with pytest.raises(ProjError): Transformer.from_crs("EPSG:4326", "EPSG:4258", accuracy=0.05) def test_transformer_allow_ballpark_filter(): with pytest.raises(ProjError): Transformer.from_crs( "EPSG:4326", "EPSG:4258", authority="PROJ", allow_ballpark=False ) def test_transformer_authority_filter(): transformer = Transformer.from_crs("EPSG:4326", "EPSG:4258", authority="PROJ") assert transformer.description == "Ballpark geographic offset from WGS 84 to ETRS89" @pytest.mark.parametrize( "input_string", [ "EPSG:1671", "RGF93 v1 to WGS 84 (1)", "urn:ogc:def:coordinateOperation:EPSG::1671", ], ) def test_transformer_from_pipeline__input_types(input_string): assert ( Transformer.from_pipeline(input_string).description == "RGF93 v1 to WGS 84 (1)" ) @pytest.mark.parametrize( "method_name", [ "to_wkt", "to_json", ], ) def test_transformer_from_pipeline__wkt_json(method_name): assert ( Transformer.from_pipeline( getattr( Transformer.from_pipeline("urn:ogc:def:coordinateOperation:EPSG::1671"), method_name, )() ).description == "RGF93 v1 to WGS 84 (1)" ) @pytest.mark.parametrize( "density,expected", [ (0, (-1684649.41338, -350356.81377, 1684649.41338, 2234551.18559)), (100, (-1684649.41338, -555777.79210, 1684649.41338, 2234551.18559)), ], ) def test_transform_bounds_densify(density, expected): transformer = Transformer.from_crs( "EPSG:4326", "+proj=laea +lat_0=45 +lon_0=-100 +x_0=0 +y_0=0 " "+a=6370997 +b=6370997 +units=m +no_defs", ) assert numpy.allclose( transformer.transform_bounds(40, -120, 64, -80, densify_pts=density), expected, ) @pytest.mark.parametrize( "density,expected", [ (0, (-1684649.41338, -350356.81377, 1684649.41338, 2234551.18559)), (100, (-1684649.41338, -555777.79210, 1684649.41338, 2234551.18559)), ], ) @pytest.mark.parametrize( "input_bounds, radians", [ ((-120, 40, -80, 64), False), ( ( numpy.radians(-120), numpy.radians(40), numpy.radians(-80), numpy.radians(64), ), True, ), ], ) def test_transform_bounds_densify__xy(density, expected, input_bounds, radians): transformer = Transformer.from_crs( "EPSG:4326", "+proj=laea +lat_0=45 +lon_0=-100 +x_0=0 +y_0=0 " "+a=6370997 +b=6370997 +units=m +no_defs", 
always_xy=True, ) assert numpy.allclose( transformer.transform_bounds( *input_bounds, densify_pts=density, radians=radians ), expected, ) def test_transform_bounds_densify_out_of_bounds(): transformer = Transformer.from_crs( "EPSG:4326", "+proj=laea +lat_0=45 +lon_0=-100 +x_0=0 +y_0=0 " "+a=6370997 +b=6370997 +units=m +no_defs", always_xy=True, ) with pytest.raises(ProjError): transformer.transform_bounds(-120, 40, -80, 64, densify_pts=-1) def test_transform_bounds_densify_out_of_bounds__geographic_output(): transformer = Transformer.from_crs( "+proj=laea +lat_0=45 +lon_0=-100 +x_0=0 +y_0=0 " "+a=6370997 +b=6370997 +units=m +no_defs", "EPSG:4326", always_xy=True, ) with pytest.raises(ProjError): transformer.transform_bounds(-120, 40, -80, 64, densify_pts=1) def test_transform_bounds_radians_output(): transformer = Transformer.from_crs( "EPSG:4326", "+proj=geocent +ellps=WGS84 +datum=WGS84", always_xy=True, ) assert_almost_equal( transformer.transform_bounds( -2704026.010, -4253051.810, -2704025.010, -4253050.810, radians=True, direction="INVERSE", ), (-2.1371136, 0.0, -2.1371133, 0.0), ) def test_transform_bounds__antimeridian(): crs = CRS("EPSG:3851") transformer = Transformer.from_crs(crs.geodetic_crs, crs) minx, miny, maxx, maxy = crs.area_of_use.bounds transformed_bounds = transformer.transform_bounds(miny, minx, maxy, maxx) assert_almost_equal( transformed_bounds, (5228058.6143420935, 1722483.900174921, 8692574.544944234, 4624385.494808555), ) assert_almost_equal( transformer.transform_bounds( *transformed_bounds, direction="INVERSE", ), (-56.7471249, 153.2799922, -24.6148194, -162.1813873), ) def test_transform_bounds__antimeridian__xy(): crs = CRS("EPSG:3851") transformer = Transformer.from_crs( crs.geodetic_crs, crs, always_xy=True, ) transformed_bounds = transformer.transform_bounds(*crs.area_of_use.bounds) assert_almost_equal( transformed_bounds, (1722483.900174921, 5228058.6143420935, 4624385.494808555, 8692574.544944234), ) assert_almost_equal( transformer.transform_bounds(*transformed_bounds, direction="INVERSE"), (153.2799922, -56.7471249, -162.1813873, -24.6148194), ) def test_transform_bounds__beyond_global_bounds(): transformer = Transformer.from_crs( "EPSG:6933", "EPSG:4326", always_xy=True, ) assert_almost_equal( transformer.transform_bounds( -17367531.3203125, -7314541.19921875, 17367531.3203125, 7314541.19921875 ), (-180, -85.0445994113099, 180, 85.0445994113099), ) @pytest.mark.parametrize( "input_crs,input_bounds,expected_bounds", [ ( "ESRI:102036", (-180.0, -90.0, 180.0, 1.3), (0, -116576599, 0, 0), ), ("ESRI:54026", (-180.0, -90.0, 180.0, 90.0), (0, -179545824, 0, 179545824)), ], ) def test_transform_bounds__ignore_inf(input_crs, input_bounds, expected_bounds): crs = CRS(input_crs) transformer = Transformer.from_crs(crs.geodetic_crs, crs, always_xy=True) assert_almost_equal( transformer.transform_bounds(*input_bounds), expected_bounds, decimal=0, ) def test_transform_bounds__ignore_inf_geographic(): crs_wkt = ( 'PROJCS["Interrupted_Goode_Homolosine",' 'GEOGCS["GCS_unnamed ellipse",DATUM["D_unknown",' 'SPHEROID["Unknown",6378137,298.257223563]],' 'PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199433]],' 'PROJECTION["Interrupted_Goode_Homolosine"],' 'UNIT["metre",1,AUTHORITY["EPSG","9001"]],' 'AXIS["Easting",EAST],AXIS["Northing",NORTH]]' ) transformer = Transformer.from_crs(crs_wkt, "EPSG:4326", always_xy=True) assert_almost_equal( transformer.transform_bounds( left=-15028000.0, bottom=7515000.0, right=-14975000.0, top=7556000.0 ), (-179.2133, 70.9345, -177.9054, 
71.4364), decimal=0, ) def test_transform_bounds__noop_geographic(): crs = CRS("Pulkovo 1942") transformer = Transformer.from_crs(crs.geodetic_crs, crs, always_xy=True) assert_almost_equal( transformer.transform_bounds(*crs.area_of_use.bounds), crs.area_of_use.bounds, ) def test_transform_bounds__north_pole(): crs = CRS("EPSG:32661") transformer = Transformer.from_crs(crs, "EPSG:4326") minx, miny, maxx, maxy = crs.area_of_use.bounds bounds = transformer.transform_bounds(miny, minx, maxy, maxx, direction="INVERSE") assert_almost_equal( bounds, ( -1405880.72, -1371213.76, 5405880.72, 5371213.76, ), decimal=0, ) assert_almost_equal( transformer.transform_bounds(*bounds), (48.656, -180.0, 90.0, 180.0), decimal=0, ) def test_transform_bounds__north_pole__xy(): crs = CRS("EPSG:32661") transformer = Transformer.from_crs(crs, "EPSG:4326", always_xy=True) bounds = transformer.transform_bounds(*crs.area_of_use.bounds, direction="INVERSE") assert_almost_equal( bounds, (-1371213.76, -1405880.72, 5371213.76, 5405880.72), decimal=0, ) assert_almost_equal( transformer.transform_bounds(*bounds), (-180.0, 48.656, 180.0, 90.0), decimal=0, ) def test_transform_bounds__south_pole(): crs = CRS("EPSG:32761") transformer = Transformer.from_crs(crs, "EPSG:4326") minx, miny, maxx, maxy = crs.area_of_use.bounds bounds = transformer.transform_bounds(miny, minx, maxy, maxx, direction="INVERSE") assert_almost_equal( bounds, ( -1405880.72, -1371213.76, 5405880.72, 5371213.76, ), decimal=0, ) assert_almost_equal( transformer.transform_bounds(*bounds), (-90, -180.0, -48.656, 180.0), decimal=0, ) def test_transform_bounds__south_pole__xy(): crs = CRS("EPSG:32761") transformer = Transformer.from_crs(crs, "EPSG:4326", always_xy=True) bounds = transformer.transform_bounds(*crs.area_of_use.bounds, direction="INVERSE") assert_almost_equal( bounds, (-1371213.76, -1405880.72, 5371213.76, 5405880.72), decimal=0, ) assert_almost_equal( transformer.transform_bounds(*bounds), (-180.0, -90.0, 180.0, -48.656), decimal=0, ) @pytest.mark.parametrize("inplace", [True, False]) def test_transform__fortran_order(inplace): lons, lats = numpy.arange(-180, 180, 20), numpy.arange(-90, 90, 10) lats, lons = numpy.meshgrid(lats, lons) f_lons, f_lats = lons.copy(order="F"), lats.copy(order="F") transformer = Transformer.from_crs( "EPSG:4326", "EPSG:6933", always_xy=True, ) xxx, yyy = transformer.transform(lons, lats) f_xxx, f_yyy = transformer.transform(f_lons, f_lats, inplace=inplace) assert f_lons.flags.f_contiguous assert f_lats.flags.f_contiguous assert not f_xxx.flags.f_contiguous assert f_xxx.flags.c_contiguous assert not f_yyy.flags.f_contiguous assert f_yyy.flags.c_contiguous assert_array_equal(xxx, f_xxx) assert_array_equal(yyy, f_yyy) def test_4d_transform__inplace__array(): transformer = Transformer.from_crs(7789, 8401) xarr = array("d", [3496737.2679]) yarr = array("d", [743254.4507]) zarr = array("d", [5264462.9620]) tarr = array("d", [2019.0]) t_xarr, t_yarr, t_zarr, t_tarr = transformer.transform( xx=xarr, yy=yarr, zz=zarr, tt=tarr, inplace=True ) assert xarr is t_xarr assert_almost_equal(xarr[0], 3496737.757717311) assert yarr is t_yarr assert_almost_equal(yarr[0], 743253.9940103051) assert zarr is t_zarr assert_almost_equal(zarr[0], 5264462.701132784) assert tarr is t_tarr assert_almost_equal(tarr[0], 2019.0) def test_4d_transform__inplace__array__int(): transformer = Transformer.from_crs(7789, 8401) xarr = array("i", [3496737]) yarr = array("i", [743254]) zarr = array("i", [5264462]) tarr = array("i", [2019]) t_xarr, t_yarr, 
t_zarr, t_tarr = transformer.transform( xx=xarr, yy=yarr, zz=zarr, tt=tarr, inplace=True ) assert xarr is not t_xarr assert xarr[0] == 3496737 assert yarr is not t_yarr assert yarr[0] == 743254 assert zarr is not t_zarr assert zarr[0] == 5264462 assert tarr is not t_tarr assert tarr[0] == 2019 def test_4d_transform__inplace__numpy(): transformer = Transformer.from_crs(7789, 8401) xarr = numpy.array([3496737.2679], dtype=numpy.float64) yarr = numpy.array([743254.4507], dtype=numpy.float64) zarr = numpy.array([5264462.9620], dtype=numpy.float64) tarr = numpy.array([2019.0], dtype=numpy.float64) t_xarr, t_yarr, t_zarr, t_tarr = transformer.transform( xx=xarr, yy=yarr, zz=zarr, tt=tarr, inplace=True ) assert xarr is t_xarr assert_almost_equal(xarr[0], 3496737.757717311) assert yarr is t_yarr assert_almost_equal(yarr[0], 743253.9940103051) assert zarr is t_zarr assert_almost_equal(zarr[0], 5264462.701132784) assert tarr is t_tarr assert_almost_equal(tarr[0], 2019.0) def test_4d_transform__inplace__numpy__int(): transformer = Transformer.from_crs(7789, 8401) xarr = numpy.array([3496737], dtype=numpy.int32) yarr = numpy.array([743254], dtype=numpy.int32) zarr = numpy.array([5264462], dtype=numpy.int32) tarr = numpy.array([2019], dtype=numpy.int32) t_xarr, t_yarr, t_zarr, t_tarr = transformer.transform( xx=xarr, yy=yarr, zz=zarr, tt=tarr, inplace=True ) assert xarr is not t_xarr assert xarr[0] == 3496737 assert yarr is not t_yarr assert yarr[0] == 743254 assert zarr is not t_zarr assert zarr[0] == 5264462 assert tarr is not t_tarr assert tarr[0] == 2019 def test_transformer_source_target_crs(): transformer = Transformer.from_crs("EPSG:4326", "EPSG:4258") assert transformer.source_crs == "EPSG:4326" assert transformer.target_crs == "EPSG:4258" def test_transformer_source_target_crs__none(): transformer = Transformer.from_pipeline("+init=ITRF2008:ITRF2000") assert transformer.source_crs is None assert transformer.target_crs is None def test_pickle_transformer_from_pipeline(): transformer = Transformer.from_pipeline("+init=ITRF2008:ITRF2000") assert transformer == pickle.loads(pickle.dumps(transformer)) def test_pickle_transformer_from_crs(): transformer = Transformer.from_crs( "EPSG:4326", "EPSG:2964", always_xy=True, area_of_interest=AreaOfInterest(-136.46, 49.0, -60.72, 83.17), ) assert transformer == pickle.loads(pickle.dumps(transformer)) def test_unpickle_transformer_from_crs_v1_3(): pickled_transformer = ( b"\x80\x04\x95p\x01\x00\x00\x00\x00\x00\x00\x8c\x12" b"pyproj.transformer\x94\x8c\x0bTransformer\x94\x93\x94)" b"\x81\x94}\x94\x8c\x12_transformer_maker\x94h\x00\x8c\x12" b"TransformerFromCRS\x94\x93\x94)\x81\x94}\x94(\x8c\x08" b"crs_from\x94C\tEPSG:4326\x94\x8c\x06crs_to\x94C\tEPSG:2964" b"\x94\x8c\talways_xy\x94\x88\x8c\x10area_of_interest\x94\x8c\n" b"pyproj.aoi\x94\x8c\x0eAreaOfInterest\x94\x93\x94)\x81\x94}\x94" b"(\x8c\x0fwest_lon_degree\x94G\xc0a\x0e\xb8Q\xeb\x85\x1f\x8c\x10" b"south_lat_degree\x94G@H\x80\x00\x00\x00\x00\x00\x8c\x0f" b"east_lon_degree\x94G\xc0N\\(\xf5\xc2\x8f\\\x8c\x10" b"north_lat_degree\x94G@T\xca\xe1G\xae\x14" b"{ub\x8c\tauthority\x94N\x8c\x08accuracy\x94N\x8c\x0eallow_ballpark\x94Nubsb." 
    )
    transformer = Transformer.from_crs(
        "EPSG:4326",
        "EPSG:2964",
        always_xy=True,
        area_of_interest=AreaOfInterest(-136.46, 49.0, -60.72, 83.17),
    )
    assert transformer == pickle.loads(pickled_transformer)


def test_transformer_group_accuracy_filter():
    group = TransformerGroup("EPSG:4326", "EPSG:4258", accuracy=0.05)
    assert not group.transformers
    assert not group.unavailable_operations


def test_transformer_group_allow_ballpark_filter():
    group = TransformerGroup(
        "EPSG:4326", "EPSG:4258", authority="PROJ", allow_ballpark=False
    )
    assert not group.transformers
    assert not group.unavailable_operations


def test_transformer_group_allow_superseded_filter():
    default_group = TransformerGroup(4203, 4326)
    superseded_group = TransformerGroup(4203, 4326, allow_superseded=True)
    assert len(superseded_group.transformers) > len(default_group.transformers)


def test_transformer_group_authority_filter():
    group = TransformerGroup("EPSG:4326", "EPSG:4258", authority="PROJ")
    assert len(group.transformers) == 1
    assert not group.unavailable_operations
    assert (
        group.transformers[0].description
        == "Ballpark geographic offset from WGS 84 to ETRS89"
    )


def test_transformer_force_over():
    transformer = Transformer.from_crs("EPSG:4326", "EPSG:3857", force_over=True)
    # Test a point along the equator.
    # The same point, but in two different representations.
    xxx, yyy = transformer.transform(0, 140)
    xxx_over, yyy_over = transformer.transform(0, -220)
    # Web Mercator x's between 0 and 180 longitude come out positive.
    # But when forcing the over flag, the -220 calculation makes it flip.
    assert xxx > 0
    assert xxx_over < 0
    # check it works in both directions
    xxx_inverse, yyy_inverse = transformer.transform(
        xxx, yyy, direction=TransformDirection.INVERSE
    )
    xxx_over_inverse, yyy_over_inverse = transformer.transform(
        xxx_over, yyy_over, direction=TransformDirection.INVERSE
    )
    assert_almost_equal(xxx_inverse, 0)
    assert_almost_equal(xxx_over_inverse, 0)
    assert_almost_equal(yyy_inverse, 140)
    assert_almost_equal(yyy_over_inverse, -220)


def test_transformer__get_last_used_operation():
    transformer = Transformer.from_crs("EPSG:4326", "EPSG:3857")
    with pytest.raises(
        ProjError,
        match=(
            r"Last used operation not found\. "
            r"This is likely due to not initiating a transform\."
        ),
    ):
        transformer.get_last_used_operation()
    xxx, yyy = transformer.transform(1, 2)
    operation = transformer.get_last_used_operation()
    assert isinstance(operation, Transformer)
    assert (xxx, yyy) == operation.transform(1, 2)
pyproj-3.7.1/test/test_utils.py000066400000000000000000000037451475425760300166210ustar00rootroot00000000000000from array import array

import numpy
import pytest

from pyproj.utils import DataType, _copytobuffer, _copytobuffer_return_scalar


@pytest.mark.parametrize("in_data", [numpy.array(1), 1])
def test__copytobuffer_return_scalar(in_data):
    assert _copytobuffer_return_scalar(in_data) == (array("d", [1]), DataType.FLOAT)


def test__copytobuffer_return_scalar__invalid():
    with pytest.raises(TypeError):
        _copytobuffer_return_scalar("invalid")


@pytest.mark.parametrize(
    "in_data, data_type",
    [
        (numpy.array(1), DataType.FLOAT),
        (1, DataType.FLOAT),
        ([1], DataType.LIST),
        ((1,), DataType.TUPLE),
    ],
)
def test__copytobuffer(in_data, data_type):
    assert _copytobuffer(in_data) == (array("d", [1]), data_type)


def test__copytobuffer__xarray_scalar():
    xarray = pytest.importorskip("xarray")
    assert _copytobuffer(xarray.DataArray(numpy.array(1))) == (
        array("d", [1]),
        DataType.FLOAT,
    )


@pytest.mark.parametrize("arr_type", ["numpy", "xarray", "pandas"])
def test__copytobuffer__array(arr_type):
    in_arr = numpy.array([1])
    if arr_type == "xarray":
        xarray = pytest.importorskip("xarray")
        in_arr = xarray.DataArray(in_arr)
    elif arr_type == "pandas":
        pandas = pytest.importorskip("pandas")
        in_arr = pandas.Series(in_arr)
    assert _copytobuffer(in_arr) == (
        in_arr.astype("d").__array__(),
        DataType.ARRAY,
    )


def test__copytobuffer__numpy_masked_array():
    in_arr = numpy.ma.array([1])
    out_arr, dtype = _copytobuffer(in_arr)
    assert isinstance(out_arr, numpy.ma.MaskedArray)


def test__copytobuffer__fortran_order():
    data = numpy.ones((2, 4), dtype=numpy.float64, order="F")
    converted_data, dtype = _copytobuffer(data)
    assert data.flags.f_contiguous
    assert not converted_data.flags.f_contiguous
    assert converted_data.flags.c_contiguous


def test__copytobuffer__invalid():
    with pytest.raises(TypeError):
        _copytobuffer("invalid")