Fiona-1.8.21 source tree (git commit 89541769ae00b457b062c7478465c059638025bf)

==> Fiona-1.8.21/.coveragerc <==

[run]
plugins = Cython.Coverage
omit = *pxd

==> Fiona-1.8.21/.github/workflows/ci_linux.yml <==

name: Linux CI

on: [push, pull_request]

jobs:
  build:
    name: Python ${{ matrix.python }} / GDAL ${{ matrix.GDALVERSION }} / PROJ ${{ matrix.PROJVERSION }}
    runs-on: [ubuntu-18.04]
    if: "!contains(github.event.head_commit.message, '[ci skip]') && !contains(github.event.head_commit.message, '[skip ci]')"
    strategy:
      fail-fast: false
      matrix:
        include:
          # Test all supported gdal minor versions (except latest stable) with one python version
          - { python: 3.6, GDALVERSION: "2.0.3", PROJVERSION: "4.9.3", allow_failure: "false" }
          - { python: 3.6, GDALVERSION: "2.1.4", PROJVERSION: "4.9.3", allow_failure: "false" }
          - { python: 3.6, GDALVERSION: "2.2.4", PROJVERSION: "4.9.3", allow_failure: "false" }
          - { python: 3.6, GDALVERSION: "2.3.3", PROJVERSION: "4.9.3", allow_failure: "false" }
          - { python: 3.6, GDALVERSION: "2.4.4", PROJVERSION: "4.9.3", allow_failure: "false" }
          - { python: 3.6, GDALVERSION: "3.0.4", PROJVERSION: "6.2.1", allow_failure: "false" }
          - { python: 3.6, GDALVERSION: "3.1.0", PROJVERSION: "6.3.2", allow_failure: "false" }
          # Test all supported python versions with latest stable gdal release
          - { python: 3.6, GDALVERSION: "3.2.1", PROJVERSION: "7.2.1", allow_failure: "false" }
          - { python: 3.7, GDALVERSION: "3.2.1", PROJVERSION: "7.2.1", allow_failure: "false" }
          - { python: 3.8, GDALVERSION: "3.2.1", PROJVERSION: "7.2.1", allow_failure: "false" }
          - { python: 3.9, GDALVERSION: "3.2.1", PROJVERSION: "7.2.1", allow_failure: "false" }
          - { python: "3.10", GDALVERSION: "3.3.3", PROJVERSION: "8.2.0", allow_failure: "false" }
          # Test GDAL master
          - { python: 3.6, GDALVERSION: "master", PROJVERSION: "7.2.1", allow_failure: "true" }

    env:
      CYTHON_COVERAGE: "true"
      MAKEFLAGS: "-j 4 -s"
      CXXFLAGS: "-O0"
      CFLAGS: "-O0"
      PROJVERSION: ${{ matrix.PROJVERSION }}
      GDALVERSION: ${{ matrix.GDALVERSION }}
      GDALINST: ${{ github.workspace }}/gdalinstall
      GDALBUILD: ${{ github.workspace }}/gdalbuild
      PROJINST: ${{ github.workspace }}/gdalinstall
      PROJBUILD: ${{ github.workspace }}/projbuild
      FILEGDB: ${{ github.workspace }}/gdalinstall/filegdb

      # Emulate travis
      TRAVIS_BUILD_DIR: ${{ github.workspace }}
      TRAVIS_OS_NAME: "linux"

    steps:
      - uses: actions/checkout@v2

      - name: Set up Python ${{ matrix.python }}
        uses: actions/setup-python@v2
        with:
          python-version: ${{ matrix.python }}

      - name: Set env variables
        run: |
          # Additional env variables
          echo "GDAL_DATA=$GDALINST/gdal-$GDALVERSION/share/gdal" >> $GITHUB_ENV
          echo "PROJ_LIB=$GDALINST/gdal-$GDALVERSION/share/proj" >> $GITHUB_ENV
          echo "LD_LIBRARY_PATH=$GDALINST/gdal-$GDALVERSION/lib:$GDALINST/proj-$PROJVERSION/lib:$FILEGDB/lib:\$LD_LIBRARY_PATH" >> $GITHUB_ENV
          # Add PATH
          echo "$GDALINST/gdal-$GDALVERSION/bin" >> $GITHUB_PATH
          echo "$GDALINST/proj-$PROJVERSION/bin" >> $GITHUB_PATH
          echo "cat \$GITHUB_ENV"
          cat $GITHUB_ENV
          echo ""
          echo "cat \$GITHUB_PATH"
          cat $GITHUB_PATH

      - name: Install apt packages
        run: |
          sudo apt-get install libatlas-base-dev libcurl4-openssl-dev libgeos-dev libfreexl-dev libzstd-dev libspatialite-dev
          # Unlike travis, packages from non-default repositories are installed.
          # While default repositories (e.g. bionic/universe or bionic/main) tend to keep packages at the same API / ABI level,
          # this is not guaranteed with other repositories.
          # The following command creates a list of these packages, which is used as key for the GDAL cache.
          # The repositories of packages can be identified in the output of `sudo apt-get install`.
          apt list --installed | grep 'libgeos-dev\|libxml2-dev' > $GITHUB_WORKSPACE/apt_list
          cat $GITHUB_WORKSPACE/apt_list

      - name: Cache GDAL binaries
        uses: actions/cache@v2
        with:
          path: gdalinstall
          key: ${{ runner.os }}-gdal-${{ matrix.GDALVERSION }}-proj-${{ matrix.PROJVERSION }}-${{ hashFiles('**/apt_list') }}

      - name: Cache pip
        uses: actions/cache@v2
        with:
          path: ~/.cache/pip
          key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements.txt') }}
          restore-keys: |
            ${{ runner.os }}-pip-

      - name: Install Python dependencies
        run: |
          python -m pip install -U pip
          python -m pip install -U wheel
          python -m pip install -r requirements-ci.txt
          python -m pip wheel -r requirements-dev.txt
          python -m pip install -r requirements-dev.txt

      - name: Build PROJ
        run: |
          chmod +x scripts/travis_proj_install.sh && ./scripts/travis_proj_install.sh

      - name: Install FileGDB
        run: |
          chmod +x scripts/travis_filegdb_install.sh && ./scripts/travis_filegdb_install.sh

      - name: Build GDAL
        continue-on-error: ${{ matrix.allow_failure == 'true' }}
        run: |
          chmod +x scripts/travis_gdal_install.sh && ./scripts/travis_gdal_install.sh
          gdal-config --version

      - name: Build Fiona
        continue-on-error: ${{ matrix.allow_failure == 'true' }}
        run: |
          if [ "$GDALVERSION" = "master" ]; then echo "Using gdal master"; elif [ $($GDALINST/gdal-$GDALVERSION/bin/gdal-config --version) == $(sed 's/[a-zA-Z].*//g' <<< $GDALVERSION) ]; then echo "Using gdal $GDALVERSION"; else echo "NOT using gdal $GDALVERSION as expected; aborting"; exit 1; fi
          GDAL_CONFIG=$GDALINST/gdal-$GDALVERSION/bin/gdal-config python -m pip install --no-deps --force-reinstall --no-use-pep517 -e .

      - name: Print Environment
        continue-on-error: ${{ matrix.allow_failure == 'true' }}
        run: |
          echo "python -m pip freeze"
          python -m pip freeze
          echo ""
          echo "fio --version"
          fio --version
          echo ""
          echo "fio --gdal-version"
          fio --gdal-version
          echo ""
          echo "python -c \"import fiona; fiona.show_versions()\""
          python -c "import fiona; fiona.show_versions()"

      - name: pytest
        continue-on-error: ${{ matrix.allow_failure == 'true' }}
        run: |
          GDAL_ENABLE_DEPRECATED_DRIVER_GTM=YES python -m pytest -m "not wheel" --cov fiona --cov-report term-missing

      - name: Coveralls
        continue-on-error: ${{ matrix.allow_failure == 'true' }}
        run: coveralls || echo "!! intermittent coveralls failure"
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

==> Fiona-1.8.21/.github/workflows/rstcheck.yml <==

name: rstcheck

# Run this workflow every time a new commit is pushed to the repository
on: [push, pull_request]

jobs:
  rstcheck:
    name: rstcheck
    runs-on: ubuntu-latest
    steps:
      - name: Checkout code
        uses: actions/checkout@v2

      - name: Set up Python
        uses: actions/setup-python@v2
        with:
          python-version: 3.8

      - name: Install Python dependencies
        run: |
          python -m pip install sphinx==3.2.1 rstcheck==3.3.1

      - name: Run rstcheck
        run: |
          rstcheck -r --ignore-directives automodule --ignore-substitutions version,release,today .

==> Fiona-1.8.21/.gitignore <==

# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]

# C extensions
*.so

# Distribution / packaging
.Python
env/
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
*.egg-info/
.installed.cfg
*.egg
.libs

# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec

# Installer logs
pip-log.txt
pip-delete-this-directory.txt

# Unit test / coverage reports
htmlcov/
.tox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*,cover

# Translations
*.mo
*.pot

# Django stuff:
*.log

# Sphinx documentation
docs/_build/

# PyBuilder
target/

# IDE's etc.
.idea/
venv/
venv2/

# fiona
VERSION.txt
fiona/_shim.c
fiona/ogrext.c
fiona/_crs.c
fiona/_drivers.c
fiona/_err.c
fiona/_geometry.c
fiona/_transform.cpp
fiona/ograpi.pxd
fiona/_shim1.c
fiona/_shim2.c
fiona/_shim22.c
fiona/_shim.pxd
fiona/_shim.pyx
tests/data/coutwildrnp.json
tests/data/coutwildrnp.tar
tests/data/coutwildrnp.zip
tests/data/coutwildrnp.gpkg
.DS_Store
.ipynb_checkpoints
.pytest_cache
MANIFEST
fiona/_env.c
fiona/ogrext1.c
fiona/ogrext2.c
fiona/schema.c

==> Fiona-1.8.21/CHANGES.txt <==

Changes
=======

All issue numbers are relative to https://github.com/Toblerity/Fiona/issues.

1.8.21 (2022-02-07)
-------------------

Changes:

- Driver mode support tests have been made more general and less susceptible
  to driver quirks involving feature fields and coordinate values (#1060).
- OSError is raised on attempts to open a dataset in a Python file object in
  "a" mode (see #1027).
- Upgrade attrs, cython, etc. to open up Python 3.10 support (#1049).

Bug fixes:

- Allow FieldSkipLogFilter to handle exception messages as well as strings
  (reported in #1035).
- Clean up VSI files left by MemoryFileBase, resolving #1041.
- Hard-coded "utf-8" collection encoding added in #423 has been removed
  (#1057).

1.8.20 (2021-05-31)
-------------------

Packaging:

- Wheels include GDAL 3.3.0 and GEOS 3.9.1.

Bug fixes:

- Allow use with click 8 and higher (#1015).

1.8.19 (2021-04-07)
-------------------

Packaging:

- Wheels include GDAL 3.2.1 and PROJ 7.2.1.

Bug fixes:

- In fiona/env.py the GDAL data path is now configured using set_gdal_config
  instead of by setting the GDAL_DATA environment variable (#1007).
- Spurious iterator reset warnings have been eliminated (#987).

1.8.18 (2020-11-17)
-------------------

- The precision option of transform has been fixed for the case of
  GeometryCollections (#971, #972); a short usage sketch follows the 1.8.16
  entry below.
- Added missing --co (creation) option to fio-load (#390).
- If the certifi package can be imported, its certificate store location will
  be passed to GDAL during import of fiona._env unless CURL_CA_BUNDLE is
  already set.
- Warn when feature fields named "" are found (#955).

1.8.17 (2020-09-09)
-------------------

- To fix issue #952 the fio-cat command no longer cuts feature geometries at
  the anti-meridian by default. A --cut-at-antimeridian option has been added
  to allow cutting of geometries in a geographic destination coordinate
  reference system.

1.8.16 (2020-09-04)
-------------------

- More OGR errors and warnings arising in calls to GDAL C API functions are
  surfaced (#946).
- A circular import introduced in some cases in 1.8.15 has been fixed (#945).
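
As a sketch of the ``transform`` precision behavior referenced in the 1.8.18
entry above (the CRS strings and coordinates here are illustrative, not taken
from the issue reports):

.. code-block:: python

    from fiona.transform import transform_geom

    # With 1.8.18, precision is applied to the members of a
    # GeometryCollection as well as to simple geometries.
    geom = {
        "type": "GeometryCollection",
        "geometries": [
            {"type": "Point", "coordinates": [500000.0, 4649776.0]},
        ],
    }
    result = transform_geom("EPSG:32633", "EPSG:4326", geom, precision=6)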

1.8.15 (2020-09-03)
-------------------

- Change shim functions to not return tuples (#942) as a solution for the
  packaging problem reported in #941.
- Raise a Python exception when VSIFOpenL fails (#937).

1.8.14 (2020-08-31)
-------------------

- When creating a new Collection in a MemoryFile with a default (random) name
  Fiona will attempt to use a format driver-supported file extension (#934).
  When initializing a MemoryFile with bytes of data formatted for a vector
  driver that requires a certain file name or extension, the user should
  continue to pass an appropriate filename and/or extension.
- Read support for FlatGeobuf has been enabled in the drvsupport module.
- The MemoryFile implementation has been improved so that it can support
  multi-part S3 downloads (#906). This is largely a port of code from
  rasterio.
- Axis ordering for results of fiona.transform was wrong when CRS were passed
  in the "EPSG:dddd" form (#919). This has been fixed by #926.
- Allow implicit access to the only dataset in a ZipMemoryFile. The path
  argument of ZipMemoryFile.open() is now optional (#928).
- Improve support for datetime types: support milliseconds (#744), timezones
  (#914) and improve warnings if a type is not supported by the driver
  (#572).
- Fix "Failed to commit transaction" TransactionError for the FileGDB driver.
- Load GDAL DLL dependencies on Python 3.8+ / Windows with
  add_dll_directory() (#851).
- Do not require optional properties (#848).
- Ensure that slice does not overflow available data (#884).
- Resolve issue when "ERROR 4: Unable to open EPSG support file gcs.csv." is
  raised on importing fiona (#897).
- Resolve issue resulting in possibly mixed-up field names (affecting only
  the DXF, GPX, GPSTrackMaker and DGN drivers) (#916).
- Ensure crs_wkt is passed when writing to MemoryFile (#907).

1.8.13.post1 (2020-02-21)
-------------------------

- This release is being made to improve binary wheel compatibility with
  shapely 1.7.0. There have been no changes to the fiona package code since
  1.8.13.

1.8.13 (2019-12-05)
-------------------

- The Python version specs for argparse and ordereddict in 1.8.12 were wrong
  and have been corrected (#843).

1.8.12 (2019-12-04)
-------------------

- Specify Python versions for argparse, enum34, and ordereddict requirements
  (#842).

1.8.11 (2019-11-07)
-------------------

- Fix an access violation on Windows (#826).

1.8.10 (2019-11-07)
-------------------

Deprecations:

- Use of the vfs keyword argument with open or listlayers has been previously
  noted as deprecated, but now triggers a deprecation warning.

Bug fixes:

- fiona.open() can now create new datasets using CRS URNs (#823).
- listlayers() now accepts file and Path objects, like open() (#825).
- Use the new set_proj_search_path() function to set the PROJ data search
  path. For GDAL versions before 3.0 this sets the PROJ_LIB environment
  variable. For GDAL version 3.0 this calls OSRSetPROJSearchPaths(), which
  overrides PROJ_LIB.
- Remove old and unused _drivers extension module.
- Check for the header.dxf file instead of pcs.csv when looking for installed
  GDAL data. The latter is gone with GDAL 3.0 but the former remains (#818).

1.8.9.post2 (2019-10-22)
------------------------

- The 1.8.9.post1 release introduced a bug affecting builds of the package
  from a source distribution using GDAL 2.x. This bug has been fixed in
  commit 960568d.

1.8.9.post1 (2019-10-22)
------------------------

- A change has been made to the package setup script so that the shim module
  for GDAL 3 is used when building the package from a source distribution.
  There are no other changes to the package.

1.8.9 (2019-10-21)
------------------

- A shim module and support for GDAL 3.0 has been added. The package can now
  be built and used with GDAL 3.0 and PROJ 6.1 or 6.2. Note that the 1.8.9
  wheels we will upload to PyPI will contain GDAL 2.4.2 and PROJ 4.9.3 as in
  the 1.8.8 wheels.

1.8.8 (2019-09-25)
------------------

- The schema of geopackage files with a geometry type code of 3000 could not
  be reported using Fiona 1.8.7. This bug is fixed.

1.8.7 (2019-09-24)
------------------

Bug fixes:

- The regression in handling of polygons with M values noted under version
  1.8.5 below was in fact not fixed then (see new report #789), but is fixed
  in version 1.8.7.
- Windows filenames containing "!" are now parsed correctly, fixing issue
  #742.

Upcoming changes:

- In version 1.9.0, the objects yielded when a Collection is iterated will be
  mutable mappings but will no longer be instances of Python's dict. Version
  1.9 is intended to be backwards compatible with 1.8 except where user code
  tests `isinstance(feature, dict)`. In version 2.0 the new Feature,
  Geometry, and Properties classes will become immutable mappings. See
  https://github.com/Toblerity/fiona-rfc/blob/master/rfc/0001-fiona-2-0-changes.md
  for more discussion of the upcoming changes for version 2.0.

1.8.6 (2019-03-18)
------------------

- The advertisement of JSON driver enablement in 1.8.5 was false (#176), but
  in this release those drivers are ready for use.

1.8.5 (2019-03-15)
------------------

- GDAL seems to work best if GDAL_DATA is set as early as possible. Ideally
  it is set when building the library or in the environment before importing
  Fiona, but for wheels we patch GDAL_DATA into os.environ when fiona.env is
  imported. This resolves #731.
- A combination of bugs which allowed .cpg files to be overlooked has been
  fixed (#726).
- On entering a collection context (Collection.__enter__) a new anonymous
  GDAL environment is created if needed and entered. This makes
  `with fiona.open(...) as collection:` roughly equivalent to
  `with fiona.open(...) as collection, Env():`. This helps prevent bugs when
  Collections are created and then used later or in different scopes.
- Missing GDAL support for TopoJSON, GeoJSONSeq, and ESRIJSON has been
  enabled (#721).
- A regression in handling of polygons with M values (#724) has been fixed.
- Per-feature debug logging calls in OGRFeatureBuilder methods have been
  eliminated to improve feature writing performance (#718).
- Native support for datasets in Google Cloud Storage identified by "gs"
  resource names has been added (#709).
- Support has been added for triangle, polyhedral surface, and TIN geometry
  types (#679).
- Notes about using the MemoryFile and ZipMemoryFile classes have been added
  to the manual (#674).

1.8.4 (2018-12-10)
------------------

- 3D geometries can now be transformed with a specified precision (#523).
- A bug producing a spurious DriverSupportError for Shapefiles with a "time"
  field (#692) has been fixed.
- Patching of the GDAL_DATA environment variable was accidentally left in
  place in 1.8.3 and now has been removed.

1.8.3 (2018-11-30)
------------------

- The RASTERIO_ENV config environment marker this project picked up from
  Rasterio has been renamed to FIONA_ENV (#665).
- Options --gdal-data and --proj-data have been added to the fio-env command
  so that users of Rasterio wheels can get paths to set GDAL_DATA and
  PROJ_LIB environment variables.
- The unsuccessful attempt to make GDAL and PROJ support file discovery and
  configuration automatic within collection's crs and crs_wkt properties has
  been reverted. Users must execute such code inside a `with Env()` block or
  set the GDAL_DATA and PROJ_LIB environment variables needed by GDAL.

1.8.2 (2018-11-19)
------------------

Bug fixes:

- Raise FionaValueError when an iterator's __next__ is called and the session
  is found to be missing or inactive instead of passing a null pointer to
  OGR_L_GetNextFeature (#687).

1.8.1 (2018-11-15)
------------------

Bug fixes:

- Add checks around OSRGetAuthorityName and OSRGetAuthorityCode calls that
  will log problems with looking up these items.
- Opened data sources are now released before we raise exceptions in
  WritingSession.start (#676). This fixes an issue with locked files on
  Windows.
- We now ensure that an Env instance exists when getting the crs or crs_wkt
  properties of a Collection (#673, #690). Otherwise, required GDAL and PROJ
  data files included in Fiona wheels can not be found.
- GDAL and PROJ data search has been refactored to improve testability
  (#678).
- In the project's Cython code, void* pointers have been replaced with proper
  GDAL types (#672).
- Pervasive warning level log messages about ENCODING creation options (#668)
  have been eliminated.

1.8.0 (2018-10-31)
------------------

This is the final 1.8.0 release. Thanks, everyone!

Bug fixes:

- We cpdef Session.stop so that it has a C version that can be called safely
  from __dealloc__, fixing a PyPy issue (#659, #553).

1.8rc1 (2018-10-26)
-------------------

There are no changes in 1.8rc1 other than more test standardization and the
introduction of a temporary test_collection_legacy.py module to support the
build of fully tested Python 2.7 macosx wheels on Travis-CI.

1.8b2 (2018-10-23)
------------------

Bug fixes:

- The ensure_env_with_credentials decorator will no longer clobber
  credentials of the outer environment. This fixes a bug reported to the
  Rasterio project and which also existed in Fiona.
- An unused import of the packaging module and the dependency have been
  removed (#653).
- The Env class logged to the 'rasterio' hierarchy instead of 'fiona'. This
  mistake has been corrected (#646).
- The Mapping abstract base class is imported from collections.abc when
  possible (#647).

Refactoring:

- Standardization of the tests on pytest functions and fixtures continues and
  is nearing completion (#648, #649, #650, #651, #652).

1.8b1 (2018-10-15)
------------------

Deprecations:

- Collection slicing has been deprecated and will be prohibited in a future
  version (see the sketch at the end of this entry).

Bug fixes:

- Rasterio CRS objects passed to transform module methods will be converted
  to dicts as needed (#590).
- Implicitly convert curve geometries to their linear approximations rather
  than failing (#617).
- Migrated unittest test cases in test_collection.py and test_layer.py to the
  use of the standard data_dir and path_coutwildrnp_shp fixtures (#616).
- Root logger configuration has been removed from all test scripts (#615).
- An AWS session is created for the CLI context Env only if explicitly
  requested, matching the behavior of Rasterio's CLI (#635).
- Dependency on attrs is made explicit.
- Other dependencies are pinned to known good versions in requirements files.
- Unused arguments have been removed from the Env constructor (#637).

Refactoring:

- A with_context_env decorator has been added and used to set up the GDAL
  environment for CLI commands. The command functions themselves are now
  simplified.
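
A minimal sketch of the migration away from deprecated collection slicing
(the file path is illustrative):

.. code-block:: python

    import itertools

    import fiona

    with fiona.open("example.shp") as src:
        # Instead of the deprecated src[0:5], take features from the
        # collection's iterator.
        first_five = list(itertools.islice(src, 5))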

1.8a3 (2018-10-01)
------------------

Deprecations:

- The ``fiona.drivers()`` context manager is officially deprecated. All users
  should switch to ``fiona.Env()``, which registers format drivers and
  manages GDAL configuration in a reversible manner.

Bug fixes:

- The Collection class now filters log messages about skipped fields to a
  maximum of one warning message per field (#627).
- The boto3 module is only imported when needed (#507, #629).
- Compatibility with Click 7.0 is achieved (#633).
- Use of %r instead of %s in a debug() call prevents UnicodeDecodeErrors
  (#620).

1.8a2 (2018-07-24)
------------------

New features:

- 64-bit integers are now the default for int type fields (#562, #564).
- 'http', 's3', 'zip+http', and 'zip+s3' URI schemes for datasets are now
  supported (#425, #426).
- We've added a ``MemoryFile`` class which supports formatted in-memory
  feature collections (#501).
- Added support for GDAL 2.x boolean field sub-type (#531).
- A new ``fio rm`` command makes it possible to cleanly remove multi-file
  datasets (#538).
- The geometry type in a feature collection is more flexible. We can now
  specify not only a single geometry type, but a sequence of permissible
  types, or "Any" to permit any geometry type (#539).
- Support for GDAL 2.2+ null fields has been added (#554).
- The new ``gdal_open_vector()`` function of our internal API provides much
  improved error handling (#557).

Bug fixes:

- The bug involving OrderedDict import on Python 2.7 has been fixed (#533).
- An ``AttributeError`` raised when the ``--bbox`` option of fio-cat is used
  with more than one input file has been fixed (#543, #544).
- The obsolete and derelict fiona.tool module has been removed.
- Revert the change in 0a2bc7c that discards Z in geometry types when a
  collection's schema is reported (#541).
- Require six version 1.7 or higher (#550).
- A regression related to "zip+s3" URIs has been fixed.
- Debian's GDAL data locations are now searched by default (#583).

1.8a1 (2017-11-06)
------------------

New features:

- Each call of ``writerecords()`` involves one or more transactions of up to
  20,000 features each. This improves performance when writing GeoPackage
  files as the previous transaction size was only 200 features (#476, #491).

Packaging:

- Fiona's Cython source files have been refactored so that there are no
  longer separate extension modules for GDAL 1.x and GDAL 2.x. Instead there
  is a base extension module based on GDAL 2.x and shim modules for
  installations that use GDAL 1.x.

1.7.11.post1 (2018-01-08)
-------------------------

- This post-release adds missing expat (and thereby GPX format) support to
  the included GDAL library (still version 2.2.2).

1.7.11 (2017-12-14)
-------------------

- The ``encoding`` keyword argument for ``fiona.open()``, which is intended
  to allow a caller to override a data source's own and possibly erroneous
  encoding, has not been working (#510, #512). The problem is that we weren't
  always setting GDAL open or config options before opening the data sources.
  This bug is resolved by a number of commits in the maint-1.7 branch and the
  fix is demonstrated in tests/test_encoding.py. A usage sketch follows the
  1.7.10.post1 entry below.
- An ``--encoding`` option has been added to fio-load to enable creation of
  encoded shapefiles with an accompanying .cpg file (#499, #517).

1.7.10.post1 (2017-10-30)
-------------------------

- A post-release has been made to fix a problem with macosx wheels uploaded
  to PyPI.
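
The ``encoding`` override described in the 1.7.11 entry above can be used
like this (a sketch; the file name and encoding are illustrative):

.. code-block:: python

    import fiona

    # Override the data source's own, possibly erroneous, encoding
    # declaration. "windows-1252" is a common case for shapefiles.
    with fiona.open("natural_earth.shp", encoding="windows-1252") as src:
        for feature in src:
            print(feature["properties"])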

1.7.10 (2017-10-26)
-------------------

Bug fixes:

- An extraneous printed line from the ``fio cat --layers`` validator has been
  removed (#478).

Packaging:

- Official OS X and Manylinux1 wheels (on PyPI) for this release will be
  compatible with Shapely 1.6.2 and Rasterio 1.0a10 wheels.

1.7.9.post1 (2017-08-21)
------------------------

This release introduces no changes in the Fiona package. It upgrades GDAL
from 2.2.0 to 2.2.1 in wheels that we publish to the Python Package Index.

1.7.9 (2017-08-17)
------------------

Bug fixes:

- Acquire the GIL for GDAL error callback functions to prevent crashes when
  GDAL errors occur when the GIL has been released by user code.
- Sync and flush layers when closing even when the number of features is not
  precisely known (#467).

1.7.8 (2017-06-20)
------------------

Bug fixes:

- Provide all arguments needed by CPLError based exceptions (#456).

1.7.7 (2017-06-05)
------------------

Bug fixes:

- Switch logger `warn()` (deprecated) calls to `warning()`.
- Replace all relative imports and cimports in Cython modules with absolute
  imports (#450).
- Avoid setting `PROJ_LIB` to a non-existent directory (#439).

1.7.6 (2017-04-26)
------------------

Bug fixes:

- Fall back to `share/proj` for PROJ_LIB (#440).
- Replace every call to `OSRDestroySpatialReference()` with `OSRRelease()`,
  fixing the GPKG driver crasher reported in #441 (#443).
- Add a `DriverIOError` derived from `IOError` to use for driver-specific
  errors such as the GeoJSON driver's refusal to overwrite existing files.
  Also we now ensure that when this error is raised by `fiona.open()` any
  created read or write session is deleted; this eliminates spurious
  exceptions on teardown of broken `Collection` objects (#437, #444).

1.7.5 (2017-03-20)
------------------

Bug fixes:

- Opening a data file in read (the default) mode with `fiona.open()` using
  the `driver` or `drivers` keyword arguments (to specify certain format
  drivers) would sometimes cause a crash on Windows due to improperly
  terminated lists of strings (#428). The fix: Fiona's buggy `string_list()`
  has been replaced by GDAL's `CSLAddString()`.

1.7.4 (2017-02-20)
------------------

Bug fixes:

- OGR's EsriJSON detection fails when certain keys aren't found in the first
  6000 bytes of data passed to `BytesCollection` (#422). A .json file
  extension is now explicitly given to the in-memory file behind
  `BytesCollection` when the `driver='GeoJSON'` keyword argument is given
  (#423).

1.7.3 (2017-02-14)
------------------

Roses are red.
Tan is a pug.
Software regression's
the most embarrassing bug.

Bug fixes:

- Use __stdcall for the GDAL error handling callback on Windows as in
  Rasterio.
- Turn on latent support for zip:// URLs in fio-cat and fio-info (#421).
- The 1.7.2 release broke support for zip files with absolute paths (#418).
  This regression has been fixed with tests to confirm.

1.7.2 (2017-01-27)
------------------

Future Deprecation:

- `Collection.__next__()` is buggy in that it can lead to duplication of
  features when used in combination with `Collection.filter()` or
  `Collection.__iter__()`. It will be removed in Fiona 2.0. Please check for
  usage of this deprecated feature by running your tests or programs with
  `PYTHONWARNINGS="always:::fiona"` or `-W"always:::fiona"` and switch from
  `next(collection)` to `next(iter(collection))` (#301).

Bug fix:

- Zipped streams of bytes can be accessed by `BytesCollection` (#318).

1.7.1.post1 (2016-12-23)
------------------------

- New binary wheels using version 1.2.0 of sgillies/frs-wheel-builds.
  See https://github.com/sgillies/frs-wheel-builds/blob/master/CHANGES.txt.

1.7.1 (2016-11-16)
------------------

Bug Fixes:

- Prevent Fiona from stumbling over 'Z', 'M', and 'ZM' geometry types
  introduced in GDAL 2.1 (#384). Fiona 1.7.1 doesn't add explicit support for
  these types; they are coerced to geometry types 1-7 ('Point', 'LineString',
  etc.)
- Raise an `UnsupportedGeometryTypeError` when a bogus or unsupported
  geometry type is encountered in a new collection's schema or elsewhere
  (#340).
- Enable `--precision 0` for fio-cat (#370).
- Prevent datetime exceptions from unnecessarily stopping collection
  iteration by yielding `None` (#385).
- Replace log.warn calls with log.warning calls (#379).
- Print an error message if neither gdal-config nor `--gdalversion` indicates
  a GDAL C API version when running `setup.py` (#364).
- Let dict-like subclasses through CRS type checks (#367).

1.7.0post2 (2016-06-15)
-----------------------

Packaging: define extension modules for 'clean' and 'config' targets (#363).

1.7.0post1 (2016-06-15)
-----------------------

Packaging: No files are copied for the 'clean' setup target (#361, #362).

1.7.0 (2016-06-14)
------------------

The C extension modules in this library can now be built and used with either
a 1.x or 2.x release of the GDAL library. Big thanks to René Buffat for
leading this effort.

Refactoring:

- The `ogrext1.pyx` and `ogrext2.pyx` files now use separate C APIs defined
  in `ogrext1.pxd` and `ogrext2.pxd`. The other extension modules have been
  refactored so that they do not depend on either of these modules and use
  subsets of the GDAL/OGR API compatible with both GDAL 1.x and 2.x (#359).

Packaging:

- Source distributions now contain two different sources for the `ogrext`
  extension module. The `ogrext1.c` file will be used with GDAL 1.x and the
  `ogrext2.c` file will be used with GDAL 2.x.

1.7b2 (2016-06-13)
------------------

- New feature: enhancement of the `--layer` option for fio-cat and fio-dump
  to allow separate layers of one or more multi-layer input files to be
  selected (#349).

1.7b1 (2016-06-10)
------------------

- New feature: support for GDAL version 2+ (#259).
- New feature: a new fio-calc CLI command (#273).
- New feature: `--layer` options for fio-info (#316) and fio-load (#299).
- New feature: a `--no-parse` option for fio-collect that lets a careful user
  avoid extra JSON serialization and deserialization (#306).
- Bug fix: `+wktext` is now preserved when serializing CRS from WKT to PROJ.4
  dicts (#352).
- Bug fix: a small memory leak when opening a collection has been fixed
  (#337).
- Bug fix: internal unicode errors now result in a log message and a
  `UnicodeError` exception, not a `TypeError` (#356).

1.6.4 (2016-05-06)
------------------

- Raise ImportError if the active GDAL library version is >= 2.0 instead of
  failing unpredictably (#338, #341). Support for GDAL >= 2.0 is coming in
  Fiona 1.7.

1.6.3.post1 (2016-03-27)
------------------------

- No changes to the library in this post-release version, but there is a
  significant change to the distributions on PyPI: to help make Fiona more
  compatible with Shapely on OS X, the GDAL shared library included in the
  macosx (only) binary wheels now statically links the GEOS library. See
  https://github.com/sgillies/frs-wheel-builds/issues/5.

1.6.3 (2015-12-22)
------------------

- Daytime has been decreasing in the Northern Hemisphere, but is now
  increasing again as it should.
- Non-UTF strings were being passed into OGR functions in some situations and
  on Windows this would sometimes crash a Python process (#303). Fiona now
  raises errors derived from UnicodeError when field names or field values
  can't be encoded.

1.6.2 (2015-09-22)
------------------

- Providing only PROJ4 representations in the dataset meta property resulted
  in loss of CRS information when using the
  `fiona.open(..., **src.meta) as dst` pattern (#265). This bug has been
  addressed by adding a crs_wkt item to the `meta` property and extending the
  `fiona.open()` and the collection constructor to look for and prioritize
  this keyword argument.

1.6.1 (2015-08-12)
------------------

- Bug fix: Fiona now deserializes JSON-encoded string properties provided by
  the OGR GeoJSON driver (#244, #245, #246).
- Bug fix: proj4 data was not copied properly into binary distributions due
  to a typo (#254).

Special thanks to WFMU DJ Liz Berg for the awesome playlist that's fueling my
release sprint. Check it out at http://wfmu.org/playlists/shows/62083. You
can't unhear Love Coffin.

1.6.0 (2015-07-21)
------------------

- Upgrade Cython requirement to 0.22 (#214).
- New BytesCollection class (#215).
- Add GDAL's OpenFileGDB driver to registered drivers (#221).
- Implement CLI commands as plugins (#228).
- Raise click.abort instead of calling sys.exit, preventing surprising exits
  (#236).

1.5.1 (2015-03-19)
------------------

- Restore test data to sdists by fixing MANIFEST.in (#216).

1.5.0 (2015-02-02)
------------------

- Finalize GeoJSON feature sequence options (#174).
- Fix for reading of datasets that don't support feature counting (#190).
- New test dataset (#188).
- Fix for encoding error (#191).
- Remove confusing warning (#195).
- Add data files for binary wheels (#196).
- Add control over drivers enabled when reading datasets (#203).
- Use cligj for CLI options involving GeoJSON (#204).
- Fix fio-info --bounds help (#206).

1.4.8 (2014-11-02)
------------------

- Add missing crs_wkt property as in Rasterio (#182).

1.4.7 (2014-10-28)
------------------

- Fix setting of CRS from EPSG codes (#149).

1.4.6 (2014-10-21)
------------------

- Handle 3D coordinates in bounds() (#178).

1.4.5 (2014-10-18)
------------------

- Add --bbox option to fio-cat (#163).
- Skip geopackage tests if run from an sdist (#167).
- Add fio-bounds and fio-distrib.
- Restore fio-dump to working order.

1.4.4 (2014-10-13)
------------------

- Fix accidental requirement on GDAL 1.11 introduced in 1.4.3 (#164).

1.4.3 (2014-10-10)
------------------

- Add support for geopackage format (#160).
- Add -f and --format aliases for --driver in CLI (#162).
- Add --version option and env command to CLI.

1.4.2 (2014-10-03)
------------------

- --dst-crs and --src-crs options for fio cat and collect (#159).

1.4.1 (2014-09-30)
------------------

- Fix encoding bug in collection's __getitem__ (#153).

1.4.0 (2014-09-22)
------------------

- Add fio cat and fio collect commands (#150).
- Return of Python 2.6 compatibility (#148).
- Improved CRS support (#149).

1.3.0 (2014-09-17)
------------------

- Add single metadata item accessors to fio info (#142).
- Move fio to setuptools entry point (#142).
- Add fio dump and load commands (#143).
- Remove fio translate command.

1.2.0 (2014-09-02)
------------------

- Always show property width and precision in schema (#123).
- Write datetime properties of features (#125).
- Reset spatial filtering in filter() (#129).
- Accept datetime.date objects as feature properties (#130).
- Add slicing to collection iterators (#132).
- Add geometry object masks to collection iterators (#136).
- Change source layout to match Shapely and Rasterio (#138).

1.1.6 (2014-07-23)
------------------

- Implement Collection __getitem__() (#112).
- Leave GDAL finalization to the DLL's destructor (#113).
- Add Collection keys(), values(), items(), __contains__() (#114).
- CRS bug fix (#116).
- Add fio CLI program.

1.1.5 (2014-05-21)
------------------

- Addition of cpl_errs context manager (#108).
- Check for NULLs with '==' test instead of 'is' (#109).
- Open auxiliary files with encoding='utf-8' in setup for Python 3 (#110).

1.1.4 (2014-04-03)
------------------

- Convert 'long' in schemas to 'int' (#101).
- Carefully map Python schema to the possibly munged internal schema (#105).
- Allow writing of features with geometry: None (#71).

1.1.3 (2014-03-23)
------------------

- Always register all GDAL and OGR drivers when entering the DriverManager
  context (#80, #92).
- Skip unsupported field types with a warning (#91).
- Allow OGR config options to be passed to fiona.drivers() (#90, #93).
- Add a bounds() function (#100).
- Turn on GPX driver.

1.1.2 (2014-02-14)
------------------

- Remove collection slice left in dumpgj (#88).

1.1.1 (2014-02-02)
------------------

- Add an interactive file inspector like the one in rasterio.
- CRS to_string bug fix (#83).

1.1 (2014-01-22)
----------------

- Use a context manager to manage drivers (#78), a backwards compatible but
  big change. Fiona is now compatible with rasterio and plays better with the
  osgeo package.

1.0.3 (2014-01-21)
------------------

- Fix serialization of +init projections (#69).

1.0.2 (2013-09-09)
------------------

- Smarter, better test setup (#65, #66, #67).
- Add type='Feature' to records read from a Collection (#68).
- Skip geometry validation when using GeoJSON driver (#61).
- Dumpgj file description reports record properties as a list (as in
  dict.items()) instead of a dict.

1.0.1 (2013-08-16)
------------------

- Allow ordering of written fields and preservation of field order when
  reading (#57).

1.0 (2013-07-30)
----------------

- Add prop_type() function.
- Allow UTF-8 encoded paths for Python 2 (#51). For Python 3, paths must
  always be str, never bytes.
- Remove encoding from collection.meta; it's a file creation option only.
- Support for linking GDAL frameworks (#54).

0.16.1 (2013-07-02)
-------------------

- Add listlayers, open, prop_width to __init__.py:__all__.
- Reset reading of OGR layer whenever we ask for a collection iterator (#49).

0.16 (2013-06-24)
-----------------

- Add support for writing layers to multi-layer files.
- Add tests to reach 100% Python code coverage.

0.15 (2013-06-06)
-----------------

- Get and set numeric field widths (#42).
- Add support for multi-layer data sources (#17).
- Add support for zip and tar virtual filesystems (#45).
- Add listlayers() function.
- Add GeoJSON to list of supported formats (#47).
- Allow selection of layers by index or name.

0.14 (2013-05-04)
-----------------

- Add option to add JSON-LD in the dumpgj program.
- Compare values to six.string_types in Collection constructor.
- Add encoding to Collection.meta.
- Document dumpgj in README.

0.13 (2013-04-30)
-----------------

- Python 2/3 compatibility in a single package. Pythons 2.6, 2.7, 3.3 now
  supported.

0.12.1 (2013-04-16)
-------------------

- Fix messed up linking of README in sdist (#39).

0.12 (2013-04-15)
-----------------

- Fix broken installation of extension modules (#35).
- Log CPL errors at their matching Python log levels.
- Use upper case for encoding names within OGR, lower case in Python.

0.11 (2013-04-14)
-----------------

- Cythonize .pyx files (#34).
- Work with or around OGR's internal recoding of record data (#35).
- Fix bug in serialization of int/float PROJ.4 params.

0.10 (2013-03-23)
-----------------

- Add function to get the width of str type properties.
- Handle validation and schema representation of 3D geometry types (#29).
- Return {'geometry': None} in the case of a NULL geometry (#31).

0.9.1 (2013-03-07)
------------------

- Silence the logger in ogrext.so (can be overridden).
- Allow user specification of record field encoding (like 'Windows-1252' for
  Natural Earth shapefiles) to help when OGR can't detect it.

0.9 (2013-03-06)
----------------

- Accessing file metadata (crs, schema, bounds) on never inspected closed
  files returns None without exceptions.
- Add a dict of supported_drivers and their supported modes.
- Raise ValueError for unsupported drivers and modes.
- Remove asserts from ogrext.pyx.
- Add validate_record method to collections.
- Add helpful coordinate system functions to fiona.crs.
- Promote use of fiona.open over fiona.collection.
- Handle Shapefile's mix of LineString/Polygon and multis (#18).
- Allow users to specify width of shapefile text fields (#20).

0.8 (2012-02-21)
----------------

- Replaced .opened attribute with .closed (the product of collection() is
  always opened). Also added a __del__() which will close a Collection, but
  it is still not to be depended upon.
- Added writerecords method.
- Added a record buffer and better counting of records in a collection.
- Manage one iterator per collection/session.
- Added a read-only bounds property.

0.7 (2012-01-29)
----------------

- Initial timezone-naive support for date, time, and datetime fields. Don't
  use these field types if you can avoid them. RFC 3339 datetimes in a string
  field are much better.

0.6.2 (2012-01-10)
------------------

- Diagnose and set the driver property of collection in read mode.
- Fail if collection paths are not to files. Multi-collection workspaces are
  a (maybe) TODO.

0.6.1 (2012-01-06)
------------------

- Handle the case of undefined crs for disk collections.

0.6 (2012-01-05)
----------------

- Support for collection coordinate reference systems based on Proj4.
- Redirect OGR warnings and errors to the Fiona log.
- Assert that pointers returned from the ograpi functions are not NULL before
  using.

0.5 (2011-12-19)
----------------

- Support for reading and writing collections of any geometry type.
- Feature and Geometry classes replaced by mappings (dicts).
- Removal of Workspace class.

0.2 (2011-09-16)
----------------

- Rename WorldMill to Fiona.

0.1.1 (2008-12-04)
------------------

- Support for features with no geometry (see the sketch below).
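
A minimal sketch of such a record, supported since 0.1.1 and yielded with
{'geometry': None} since 0.10 (the property values here are illustrative):

.. code-block:: python

    feature = {
        "type": "Feature",
        "geometry": None,
        "properties": {"name": "attribute-only record"},
    }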

==> Fiona-1.8.21/CITATION.txt <==

If you use Fiona for any published work, please cite it using the reference
below:

@Misc{,
  author = {Sean Gillies and others},
  organization = {Toblerity},
  title = {Fiona is OGR's neat, nimble, no-nonsense API},
  year = {2011--},
  url = "https://github.com/Toblerity/Fiona"
}

==> Fiona-1.8.21/CODE_OF_CONDUCT.md <==

# Contributor Code of Conduct

As contributors and maintainers of this project, and in the interest of
fostering an open and welcoming community, we pledge to respect all people
who contribute through reporting issues, posting feature requests, updating
documentation, submitting pull requests or patches, and other activities.

We are committed to making participation in this project a harassment-free
experience for everyone, regardless of level of experience, gender, gender
identity and expression, sexual orientation, disability, personal appearance,
body size, race, ethnicity, age, religion, or nationality.

Examples of unacceptable behavior by participants include:

* The use of sexualized language or imagery
* Personal attacks
* Trolling or insulting/derogatory comments
* Public or private harassment
* Publishing other's private information, such as physical or electronic
  addresses, without explicit permission
* Other unethical or unprofessional conduct.

Project maintainers have the right and responsibility to remove, edit, or
reject comments, commits, code, wiki edits, issues, and other contributions
that are not aligned to this Code of Conduct. By adopting this Code of
Conduct, project maintainers commit themselves to fairly and consistently
applying these principles to every aspect of managing this project. Project
maintainers who do not follow or enforce the Code of Conduct may be
permanently removed from the project team.

This code of conduct applies both within project spaces and in public spaces
when an individual is representing the project or its community.

Instances of abusive, harassing, or otherwise unacceptable behavior may be
reported by opening an issue or contacting one or more of the project
maintainers.

This Code of Conduct is adapted from the
[Contributor Covenant](http://contributor-covenant.org), version 1.2.0,
available at
[http://contributor-covenant.org/version/1/2/0/](http://contributor-covenant.org/version/1/2/0/)

==> Fiona-1.8.21/CREDITS.txt <==

Credits
=======

Fiona is written by:

- Sean Gillies
- René Buffat
- Joshua Arnott
- Kevin Wurster
- Micah Cochran
- Matthew Perry
- Elliott Sales de Andrade
- Kelsey Jordahl
- Patrick Young
- Simon Norris
- Hannes Gräuler
- Johan Van de Wauw
- Jacob Wasserman
- Michael Weisman
- Ryan Grout
- Bas Couwenberg
- Brendan Ward
- Hannes
- Michele Citterio
- Miro Hrončok
- Sid Kapur
- Tim Tröndle
- fredj
- qinfeng
- Ariel Nunez
- Ariki
- Brandon Liu
- Chris Mutel
- Denis Rykov
- Efrén
- Egor Fedorov
- Even Rouault
- Filipe Fernandes
- Géraud
- Hannes Gräuler
- Jesse Crocker
- Juan Luis Cano Rodríguez
- Ludovic Delauné
- Martijn Visser
- Matthew Perry
- Michael Weisman
- Oliver Tonnhofer
- Stefano Costa
- Stephane Poss
- dimlev
- wilsaj

The GeoPandas project (Joris Van den Bossche et al.) has been a major driver
for new features in 1.8.0.

Fiona would not be possible without the great work of Frank Warmerdam and
other GDAL/OGR developers.

Some portions of this work were supported by a grant (for Pleiades_) from the
U.S. National Endowment for the Humanities (http://www.neh.gov).

.. _Pleiades: http://pleiades.stoa.org

==> Fiona-1.8.21/ISSUE_TEMPLATE.md <==

## Expected behavior and actual behavior.

For example: I expected to read 10 features from a file and an exception
occurred on the 3rd.

## Steps to reproduce the problem.

For example: a script with required data.

## Operating system

For example: Mac OS X 10.12.3.

## Fiona and GDAL version and provenance

For example: the 1.7.10.post1 manylinux1 wheel installed from PyPI using pip
version 9.0.1.

For example: GDAL 2.1.0 installed via Homebrew

==> Fiona-1.8.21/LICENSE.txt <==

Copyright (c) 2007, Sean C. Gillies
All rights reserved.

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:

* Redistributions of source code must retain the above copyright notice,
  this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
  this list of conditions and the following disclaimer in the documentation
  and/or other materials provided with the distribution.
* Neither the name of Sean C. Gillies nor the names of its contributors may
  be used to endorse or promote products derived from this software without
  specific prior written permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.

==> Fiona-1.8.21/MANIFEST.in <==

global-exclude .DS_Store
global-exclude *.pyc
recursive-exclude docs/data *
recursive-exclude docs/_build *
exclude MANIFEST.in
exclude *.txt *.py
recursive-include docs *.rst *.txt
recursive-include tests *.py
recursive-include tests/data *
include fiona/*.c fiona/*.cpp
include CHANGES.txt CREDITS.txt LICENSE.txt VERSION.txt README.rst
include benchmark.py setup.py requirements.txt

==> Fiona-1.8.21/README.rst <==

=====
Fiona
=====

Fiona is GDAL_'s neat and nimble vector API for Python programmers.

.. image:: https://github.com/Toblerity/Fiona/workflows/Linux%20CI/badge.svg?branch=maint-1.8
   :target: https://github.com/Toblerity/Fiona/actions?query=branch%3Amaint-1.8

.. image:: https://ci.appveyor.com/api/projects/status/github/Toblerity/Fiona?svg=true
   :target: https://ci.appveyor.com/project/sgillies/fiona/branch/master

.. image:: https://coveralls.io/repos/Toblerity/Fiona/badge.svg
   :target: https://coveralls.io/r/Toblerity/Fiona

Fiona is designed to be simple and dependable.
It focuses on reading and writing data in standard Python IO style and relies
upon familiar Python types and protocols such as files, dictionaries,
mappings, and iterators instead of classes specific to OGR. Fiona can read
and write real-world data using multi-layered GIS formats and zipped virtual
file systems and integrates readily with other Python GIS packages such as
pyproj_, Rtree_, and Shapely_. Fiona is supported only on CPython versions
2.7 and 3.4+.

For more details, see:

* Fiona `home page <https://github.com/Toblerity/Fiona>`__
* Fiona `docs and manual <https://fiona.readthedocs.io/>`__
* Fiona `examples <https://github.com/Toblerity/Fiona/tree/master/examples>`__

Usage
=====

Collections
-----------

Records are read from and written to ``file``-like `Collection` objects
returned from the ``fiona.open()`` function. Records are mappings modeled on
the GeoJSON format. They don't have any spatial methods of their own, so if
you want to do anything fancy with them you will probably need Shapely or
something like it. Here is an example of using Fiona to read some records
from one data file, change their geometry attributes, and write them to a new
data file.

.. code-block:: python

    import fiona

    # Open a file for reading. We'll call this the "source."
    with fiona.open('tests/data/coutwildrnp.shp') as src:

        # The file we'll write to, the "destination", must be initialized
        # with a coordinate system, a format driver name, and
        # a record schema. We can get initial values from the open
        # collection's ``meta`` property and then modify them as
        # desired.
        meta = src.meta
        meta['schema']['geometry'] = 'Point'

        # Open an output file, using the same format driver and
        # coordinate reference system as the source. The ``meta``
        # mapping fills in the keyword parameters of fiona.open().
        with fiona.open('test_write.shp', 'w', **meta) as dst:

            # Process only the records intersecting a box.
            for f in src.filter(bbox=(-107.0, 37.0, -105.0, 39.0)):

                # Get a point on the boundary of the record's
                # geometry.
                f['geometry'] = {
                    'type': 'Point',
                    'coordinates': f['geometry']['coordinates'][0][0]}

                # Write the record out.
                dst.write(f)

    # The destination's contents are flushed to disk and the file is
    # closed when its ``with`` block ends. This effectively
    # executes ``dst.flush(); dst.close()``.

Reading Multilayer data
-----------------------

Collections can also be made from single layers within multilayer files or
directories of data. The target layer is specified by name or by its integer
index within the file or directory. The ``fiona.listlayers()`` function
provides an index ordered list of layer names.

.. code-block:: python

    for layername in fiona.listlayers('tests/data'):
        with fiona.open('tests/data', layer=layername) as src:
            print(layername, len(src))

    # Output:
    # (u'coutwildrnp', 67)

A layer can also be specified by index. In this case, ``layer=0`` and
``layer='test_uk'`` specify the same layer in the data file or directory.

.. code-block:: python

    for i, layername in enumerate(fiona.listlayers('tests/data')):
        with fiona.open('tests/data', layer=i) as src:
            print(i, layername, len(src))

    # Output:
    # (0, u'coutwildrnp', 67)

Writing Multilayer data
-----------------------

Multilayer data can be written as well. Layers must be specified by name when
writing.

.. code-block:: python

    with fiona.open('tests/data/coutwildrnp.shp') as src:
        meta = src.meta
        f = next(src)

    with fiona.open('/tmp/foo', 'w', layer='bar', **meta) as dst:
        dst.write(f)

    print(fiona.listlayers('/tmp/foo'))

    with fiona.open('/tmp/foo', layer='bar') as src:
        print(len(src))
        f = next(src)
        print(f['geometry']['type'])
        print(f['properties'])

    # Output:
    # [u'bar']
    # 1
    # Polygon
    # OrderedDict([(u'PERIMETER', 1.22107), (u'FEATURE2', None), (u'NAME', u'Mount Naomi Wilderness'), (u'FEATURE1', u'Wilderness'), (u'URL', u'http://www.wilderness.net/index.cfm?fuse=NWPS&sec=wildView&wname=Mount%20Naomi'), (u'AGBUR', u'FS'), (u'AREA', 0.0179264), (u'STATE_FIPS', u'49'), (u'WILDRNP020', 332), (u'STATE', u'UT')])

A view of the /tmp/foo directory will confirm the creation of the new files.

.. code-block:: console

    $ ls /tmp/foo
    bar.cpg bar.dbf bar.prj bar.shp bar.shx

Collections from archives and virtual file systems
--------------------------------------------------

Zip and Tar archives can be treated as virtual filesystems and Collections
can be made from paths and layers within them. In other words, Fiona lets you
read and write zipped Shapefiles.

.. code-block:: python

    for i, layername in enumerate(
            fiona.listlayers('zip://tests/data/coutwildrnp.zip')):
        with fiona.open('zip://tests/data/coutwildrnp.zip', layer=i) as src:
            print(i, layername, len(src))

    # Output:
    # (0, u'coutwildrnp', 67)

Fiona can also read from more exotic file systems. For instance, a zipped
shape file in S3 can be accessed like so:

.. code-block:: python

    with fiona.open('zip+s3://mapbox/rasterio/coutwildrnp.zip') as src:
        print(len(src))

    # Output:
    # 67

Fiona CLI
=========

Fiona's command line interface, named "fio", is documented at `docs/cli.rst
<https://github.com/Toblerity/Fiona/blob/master/docs/cli.rst>`__. Its
``fio info`` pretty prints information about a data file.

.. code-block:: console

    $ fio info --indent 2 tests/data/coutwildrnp.shp
    {
      "count": 67,
      "crs": "EPSG:4326",
      "driver": "ESRI Shapefile",
      "bounds": [
        -113.56424713134766,
        37.0689811706543,
        -104.97087097167969,
        41.99627685546875
      ],
      "schema": {
        "geometry": "Polygon",
        "properties": {
          "PERIMETER": "float:24.15",
          "FEATURE2": "str:80",
          "NAME": "str:80",
          "FEATURE1": "str:80",
          "URL": "str:101",
          "AGBUR": "str:80",
          "AREA": "float:24.15",
          "STATE_FIPS": "str:80",
          "WILDRNP020": "int:10",
          "STATE": "str:80"
        }
      }
    }

Installation
============

Fiona requires Python 2.7 or 3.4+ and GDAL/OGR 1.8+. To build from a source
distribution you will need a C compiler and GDAL and Python development
headers and libraries (libgdal1-dev for Debian/Ubuntu, gdal-dev for
CentOS/Fedora).

To build from a repository copy, you will also need Cython to build C sources
from the project's .pyx files. See the project's requirements-dev.txt file
for guidance.

The `Kyngchaos GDAL frameworks
<https://www.kyngchaos.com/software/frameworks/>`__ will satisfy the GDAL/OGR
dependency for OS X, as will Homebrew's GDAL Formula (``brew install gdal``).

Python Requirements
-------------------

Fiona depends on the modules ``enum34``, ``six``, ``cligj``, ``munch``,
``argparse``, and ``ordereddict`` (the latter two modules are standard in
Python 2.7+). Pip will fetch these requirements for you, but users installing
Fiona from a Windows installer must get them separately.

Unix-like systems
-----------------

Assuming you're using a virtualenv (if not, skip to the 4th command) and the
GDAL/OGR libraries, headers, and `gdal-config`_ program are installed to well
known locations on your system via your system's package manager
(``brew install gdal`` using Homebrew on OS X), installation is this simple.

.. code-block:: console

    $ mkdir fiona_env
    $ virtualenv fiona_env
    $ source fiona_env/bin/activate
    (fiona_env)$ pip install fiona

If gdal-config is not available or if GDAL/OGR headers and libs aren't
installed to a well known location, you must set include dirs, library dirs,
and libraries options via the setup.cfg file or setup command line as shown
below (using ``git``). You must also specify the version of the GDAL API on
the command line using the ``--gdalversion`` argument (see example below) or
with the ``GDAL_VERSION`` environment variable (e.g.
``export GDAL_VERSION=2.1``).

.. code-block:: console

    (fiona_env)$ git clone git://github.com/Toblerity/Fiona.git
    (fiona_env)$ cd Fiona
    (fiona_env)$ python setup.py build_ext -I/path/to/gdal/include -L/path/to/gdal/lib -lgdal install --gdalversion 2.1

Or specify that build options and the GDAL API version should be provided by
a particular gdal-config program.

.. code-block:: console

    (fiona_env)$ GDAL_CONFIG=/path/to/gdal-config pip install fiona

Windows
-------

Binary installers are available at
https://www.lfd.uci.edu/~gohlke/pythonlibs/#fiona and coming eventually to
PyPI.

You can download a binary distribution of GDAL from `here
<https://www.gisinternals.com/release.php>`_. You will also need to download
the compiled libraries and headers (include files).

When building from source on Windows, it is important to know that setup.py
cannot rely on gdal-config, which is only present on UNIX systems, to
discover the locations of header files and libraries that Fiona needs to
compile its C extensions. On Windows, these paths need to be provided by the
user. You will need to find the include files and the library files for gdal
and use setup.py as follows. You must also specify the version of the GDAL
API on the command line using the ``--gdalversion`` argument (see example
below) or with the ``GDAL_VERSION`` environment variable (e.g.
``set GDAL_VERSION=2.1``).

.. code-block:: console

    $ python setup.py build_ext -I<path to gdal include files> -lgdal_i -L<path to gdal library> install --gdalversion 2.1

Note: The following environment variables need to be set so that Fiona works
correctly:

* The directory containing the GDAL DLL (``gdal304.dll`` or similar) needs to
  be in your Windows ``PATH`` (e.g. ``C:\gdal\bin``).
* The gdal-data directory needs to be in your Windows ``PATH`` or the
  environment variable ``GDAL_DATA`` must be set (e.g.
  ``C:\gdal\bin\gdal-data``).
* The environment variable ``PROJ_LIB`` must be set to the proj library
  directory (e.g. ``C:\gdal\bin\proj6\share``).

The `Appveyor CI build
<https://ci.appveyor.com/project/sgillies/fiona/branch/master>`_ uses the
GISInternals GDAL binaries to build Fiona. This produces a binary wheel for
successful builds, which includes GDAL and other dependencies, for users
wanting to try an unstable development version. The `Appveyor configuration
file <https://github.com/Toblerity/Fiona/blob/master/appveyor.yml>`_ may be a
useful example for users building from source on Windows.

Development and testing
=======================

Building from the source requires Cython. Tests require
`pytest <https://docs.pytest.org/>`_.
If the GDAL/OGR libraries, headers, and `gdal-config`_ program are installed to well known locations on your system (via your system's package manager), you can do this:: (fiona_env)$ git clone git://github.com/Toblerity/Fiona.git (fiona_env)$ cd Fiona (fiona_env)$ pip install cython (fiona_env)$ pip install -e .[test] (fiona_env)$ py.test Or you can use the ``pep-518-install`` script:: (fiona_env)$ git clone git://github.com/Toblerity/Fiona.git (fiona_env)$ cd Fiona (fiona_env)$ ./pep-518-install If you have a non-standard environment, you'll need to specify the include and lib dirs and GDAL library on the command line:: (fiona_env)$ python setup.py build_ext -I/path/to/gdal/include -L/path/to/gdal/lib -lgdal --gdalversion 2 develop (fiona_env)$ py.test .. _GDAL: http://www.gdal.org .. _pyproj: http://pypi.python.org/pypi/pyproj/ .. _Rtree: http://pypi.python.org/pypi/Rtree/ .. _Shapely: http://pypi.python.org/pypi/Shapely/ .. _gdal-config: http://www.gdal.org/gdal-config.html Fiona-1.8.21/appveyor.yml000066400000000000000000000161631420023252700152330ustar00rootroot00000000000000# Based on appveyor.yml from https://github.com/PDAL/PDAL and https://github.com/ogrisel/python-appveyor-demo platform: x64 environment: global: # SDK v7.0 MSVC Express 2008's SetEnv.cmd script will fail if the # /E:ON and /V:ON options are not enabled in the batch script intepreter # See: http://stackoverflow.com/a/13751649/163740 CMD_IN_ENV: "cmd /E:ON /V:ON /C .\\appveyor\\run_with_env.cmd" GDAL_HOME: "C:\\gdal" PYTHONWARNINGS: "ignore:DEPRECATION::pip._internal.cli.base_command" ENABLE_DEPRECATED_DRIVER_GTM: "YES" matrix: # PYTHON_VERSION and PYTHON_ARCH are required by run_with_env.cmd. # The 4-digit number in the GISInternals archives is the MSVC version used to build # the libraries. It does not need to match the version of MSVC used to build Python. 
# https://en.wikipedia.org/wiki/Microsoft_Visual_C%2B%2B#Internal_version_numbering - PYTHON: "C:\\Python27-x64" PYTHON_VERSION: "2.7" PYTHON_ARCH: "64" GDAL_VERSION: "1.11.4" GIS_INTERNALS: "release-1800-x64-gdal-1-11-4-mapserver-6-4-3.zip" GIS_INTERNALS_LIBS: "release-1800-x64-gdal-1-11-4-mapserver-6-4-3-libs.zip" - PYTHON: "C:\\Python36-x64" PYTHON_VERSION: "3.6" PYTHON_ARCH: "64" GDAL_VERSION: "1.11.4" GIS_INTERNALS: "release-1800-x64-gdal-1-11-4-mapserver-6-4-3.zip" GIS_INTERNALS_LIBS: "release-1800-x64-gdal-1-11-4-mapserver-6-4-3-libs.zip" - PYTHON: "C:\\Python36-x64" PYTHON_VERSION: "3.6" PYTHON_ARCH: "64" GDAL_VERSION: "2.2.3" GIS_INTERNALS: "release-1911-x64-gdal-2-2-3-mapserver-7-0-7.zip" GIS_INTERNALS_LIBS: "release-1911-x64-gdal-2-2-3-mapserver-7-0-7-libs.zip" - PYTHON: "C:\\Python36-x64" PYTHON_VERSION: "3.6" PYTHON_ARCH: "64" GDAL_VERSION: "2.3.3" GIS_INTERNALS: "release-1911-x64-gdal-2-3-3-mapserver-7-2-1.zip" GIS_INTERNALS_LIBS: "release-1911-x64-gdal-2-3-3-mapserver-7-2-1-libs.zip" - PYTHON: "C:\\Python36-x64" PYTHON_VERSION: "3.6" PYTHON_ARCH: "64" GDAL_VERSION: "2.4.2" GIS_INTERNALS: "release-1911-x64-gdal-2-4-2-mapserver-7-4-0.zip" GIS_INTERNALS_LIBS: "release-1911-x64-gdal-2-4-2-mapserver-7-4-0-libs.zip" - PYTHON: "C:\\Python36-x64" PYTHON_VERSION: "3.6" PYTHON_ARCH: "64" GDAL_VERSION: "3.0.4" GIS_INTERNALS: "release-1911-x64-gdal-3-0-4-mapserver-7-4-3.zip" GIS_INTERNALS_LIBS: "release-1911-x64-gdal-3-0-4-mapserver-7-4-3-libs.zip" PROJ_LIB: "C:\\gdal\\bin\\proj6\\share" - PYTHON: "C:\\Python37-x64" PYTHON_VERSION: "3.7" PYTHON_ARCH: "64" GDAL_VERSION: "2.4.2" GIS_INTERNALS: "release-1911-x64-gdal-2-4-2-mapserver-7-4-0.zip" GIS_INTERNALS_LIBS: "release-1911-x64-gdal-2-4-2-mapserver-7-4-0-libs.zip" - PYTHON: "C:\\Python37-x64" PYTHON_VERSION: "3.7" PYTHON_ARCH: "64" GDAL_VERSION: "3.0.4" GIS_INTERNALS: "release-1911-x64-gdal-3-0-4-mapserver-7-4-3.zip" GIS_INTERNALS_LIBS: "release-1911-x64-gdal-3-0-4-mapserver-7-4-3-libs.zip" PROJ_LIB: "C:\\gdal\\bin\\proj6\\share" - PYTHON: "C:\\Python37-x64" PYTHON_VERSION: "3.7" PYTHON_ARCH: "64" GDAL_VERSION: "3.1.2" GIS_INTERNALS: "release-1911-x64-gdal-3-1-2-mapserver-7-6-1.zip" GIS_INTERNALS_LIBS: "release-1911-x64-gdal-3-1-2-mapserver-7-6-1-libs.zip" PROJ_LIB: "C:\\gdal\\bin\\proj6\\share" - PYTHON: "C:\\Python38-x64" PYTHON_VERSION: "3.8" PYTHON_ARCH: "64" GDAL_VERSION: "3.0.4" GIS_INTERNALS: "release-1911-x64-gdal-3-0-4-mapserver-7-4-3.zip" GIS_INTERNALS_LIBS: "release-1911-x64-gdal-3-0-4-mapserver-7-4-3-libs.zip" PROJ_LIB: "C:\\gdal\\bin\\proj6\\share" - PYTHON: "C:\\Python38-x64" PYTHON_VERSION: "3.8" PYTHON_ARCH: "64" GDAL_VERSION: "3.1.2" GIS_INTERNALS: "release-1911-x64-gdal-3-1-2-mapserver-7-6-1.zip" GIS_INTERNALS_LIBS: "release-1911-x64-gdal-3-1-2-mapserver-7-6-1-libs.zip" PROJ_LIB: "C:\\gdal\\bin\\proj6\\share" matrix: allow_failures: - GDAL_VERSION: "1.11.4" install: - ECHO "Filesystem root:" - ps: "ls \"C:/\"" - ECHO "Installed SDKs:" - ps: "ls \"C:/Program Files/Microsoft SDKs/Windows\"" # Install Python (from the official .msi of http://python.org) and pip when # not already installed. # - ps: if (-not(Test-Path($env:PYTHON))) { & appveyor\install.ps1 } # Prepend newly installed Python to the PATH of this build (this cannot be # done from inside the powershell script as it would require to restart # the parent CMD process). 
- "SET PATH=%PYTHON%;%PYTHON%\\Scripts;%PATH%" - "SET PYTHONPATH=%PYTHON%\\Lib\\site-packages;%PYTHONPATH%" # Check that we have the expected version and architecture for Python - "python --version" - "python -c \"import struct; print(struct.calcsize('P') * 8)\"" - ps: mkdir C:\build | out-null - ps: mkdir C:\gdal | out-null - curl http://download.gisinternals.com/sdk/downloads/%GIS_INTERNALS% --output gdalbin.zip - 7z x gdalbin.zip -oC:\gdal - curl http://download.gisinternals.com/sdk/downloads/%GIS_INTERNALS_LIBS% --output gdallibs.zip - 7z x gdallibs.zip -oC:\gdal - "SET PATH=C:\\gdal;C:\\gdal\\bin;C:\\gdal\\data;C:\\gdal\\bin\\gdal\\apps;%PATH%" - "SET GDAL_DATA=C:\\gdal\\bin\\gdal-data" - "SET PACKAGE_DATA=1" - ECHO "Filesystem C:/GDAL:" - ps: "ls \"C:/GDAL\"" - cd C:\projects\fiona # Upgrade to the latest version of pip to avoid it displaying warnings # about it being out of date. - cmd: python -m pip install --disable-pip-version-check --user --upgrade pip - cmd: python -m pip --version # Install the build dependencies of the project. If some dependencies contain # compiled extensions and are not provided as pre-built wheel packages, # pip will build them from source using the MSVC compiler matching the # target Python version and architecture - "%CMD_IN_ENV% pip install -r requirements-dev.txt" build_script: # Build the compiled extension - cmd: echo %PATH% - cmd: echo %PYTHONPATH% # copy gisinternal gdal librarys into .libs - cmd: xcopy C:\gdal\bin\*.dll fiona\.libs\ - cmd: xcopy C:\gdal\*.rtf fiona\.libs\licenses\ # build fiona and create a wheel - "%CMD_IN_ENV% python setup.py build_ext -IC:\\gdal\\include -lgdal_i -LC:\\gdal\\lib bdist_wheel --gdalversion %GDAL_VERSION%" # install the wheel - ps: python -m pip install --upgrade pip - ps: python -m pip install --no-deps --ignore-installed (gci dist\*.whl | % { "$_" }) - ps: python -m pip freeze - ps: move fiona fiona.build test_script: # Run the project tests - cmd: SET - ps: python -c "import fiona" # Our Windows GDAL doesn't have iconv and can't support certain tests. - "%CMD_IN_ENV% python -m pytest -m \"not iconv and not wheel\" --cov fiona --cov-report term-missing" artifacts: - path: dist\*.whl name: wheel Fiona-1.8.21/appveyor/000077500000000000000000000000001420023252700145015ustar00rootroot00000000000000Fiona-1.8.21/appveyor/install.ps1000066400000000000000000000160331420023252700165770ustar00rootroot00000000000000# Sample script to install Python and pip under Windows # Authors: Olivier Grisel, Jonathan Helmus, Kyle Kastner, and Alex Willmer # License: CC0 1.0 Universal: http://creativecommons.org/publicdomain/zero/1.0/ $MINICONDA_URL = "http://repo.continuum.io/miniconda/" $BASE_URL = "https://www.python.org/ftp/python/" $GET_PIP_URL = "https://bootstrap.pypa.io/get-pip.py" $GET_PIP_PATH = "C:\get-pip.py" $PYTHON_PRERELEASE_REGEX = @" (?x) (?\d+) \. (?\d+) \. (?\d+) (?[a-z]{1,2}\d+) "@ function Download ($filename, $url) { $webclient = New-Object System.Net.WebClient $basedir = $pwd.Path + "\" $filepath = $basedir + $filename if (Test-Path $filename) { Write-Host "Reusing" $filepath return $filepath } # Download and retry up to 3 times in case of network transient errors. 
Write-Host "Downloading" $filename "from" $url $retry_attempts = 2 for ($i = 0; $i -lt $retry_attempts; $i++) { try { $webclient.DownloadFile($url, $filepath) break } Catch [Exception]{ Start-Sleep 1 } } if (Test-Path $filepath) { Write-Host "File saved at" $filepath } else { # Retry once to get the error message if any at the last try $webclient.DownloadFile($url, $filepath) } return $filepath } function ParsePythonVersion ($python_version) { if ($python_version -match $PYTHON_PRERELEASE_REGEX) { return ([int]$matches.major, [int]$matches.minor, [int]$matches.micro, $matches.prerelease) } $version_obj = [version]$python_version return ($version_obj.major, $version_obj.minor, $version_obj.build, "") } function DownloadPython ($python_version, $platform_suffix) { $major, $minor, $micro, $prerelease = ParsePythonVersion $python_version if (($major -le 2 -and $micro -eq 0) ` -or ($major -eq 3 -and $minor -le 2 -and $micro -eq 0) ` ) { $dir = "$major.$minor" $python_version = "$major.$minor$prerelease" } else { $dir = "$major.$minor.$micro" } if ($prerelease) { if (($major -le 2) ` -or ($major -eq 3 -and $minor -eq 1) ` -or ($major -eq 3 -and $minor -eq 2) ` -or ($major -eq 3 -and $minor -eq 3) ` ) { $dir = "$dir/prev" } } if (($major -le 2) -or ($major -le 3 -and $minor -le 4)) { $ext = "msi" if ($platform_suffix) { $platform_suffix = ".$platform_suffix" } } else { $ext = "exe" if ($platform_suffix) { $platform_suffix = "-$platform_suffix" } } $filename = "python-$python_version$platform_suffix.$ext" $url = "$BASE_URL$dir/$filename" $filepath = Download $filename $url return $filepath } function InstallPython ($python_version, $architecture, $python_home) { Write-Host "Installing Python" $python_version "for" $architecture "bit architecture to" $python_home if (Test-Path $python_home) { Write-Host $python_home "already exists, skipping." return $false } if ($architecture -eq "32") { $platform_suffix = "" } else { $platform_suffix = "amd64" } $installer_path = DownloadPython $python_version $platform_suffix $installer_ext = [System.IO.Path]::GetExtension($installer_path) Write-Host "Installing $installer_path to $python_home" $install_log = $python_home + ".log" if ($installer_ext -eq '.msi') { InstallPythonMSI $installer_path $python_home $install_log } else { InstallPythonEXE $installer_path $python_home $install_log } if (Test-Path $python_home) { Write-Host "Python $python_version ($architecture) installation complete" } else { Write-Host "Failed to install Python in $python_home" Get-Content -Path $install_log Exit 1 } } function InstallPythonEXE ($exepath, $python_home, $install_log) { $install_args = "/quiet InstallAllUsers=1 TargetDir=$python_home" RunCommand $exepath $install_args } function InstallPythonMSI ($msipath, $python_home, $install_log) { $install_args = "/qn /log $install_log /i $msipath TARGETDIR=$python_home" $uninstall_args = "/qn /x $msipath" RunCommand "msiexec.exe" $install_args if (-not(Test-Path $python_home)) { Write-Host "Python seems to be installed else-where, reinstalling." RunCommand "msiexec.exe" $uninstall_args RunCommand "msiexec.exe" $install_args } } function RunCommand ($command, $command_args) { Write-Host $command $command_args Start-Process -FilePath $command -ArgumentList $command_args -Wait -Passthru } function InstallPip ($python_home) { $pip_path = $python_home + "\Scripts\pip.exe" $python_path = $python_home + "\python.exe" if (-not(Test-Path $pip_path)) { Write-Host "Installing pip..." 
$webclient = New-Object System.Net.WebClient $webclient.DownloadFile($GET_PIP_URL, $GET_PIP_PATH) Write-Host "Executing:" $python_path $GET_PIP_PATH & $python_path $GET_PIP_PATH } else { Write-Host "pip already installed." } } function DownloadMiniconda ($python_version, $platform_suffix) { if ($python_version -eq "3.4") { $filename = "Miniconda3-3.5.5-Windows-" + $platform_suffix + ".exe" } else { $filename = "Miniconda-3.5.5-Windows-" + $platform_suffix + ".exe" } $url = $MINICONDA_URL + $filename $filepath = Download $filename $url return $filepath } function InstallMiniconda ($python_version, $architecture, $python_home) { Write-Host "Installing Python" $python_version "for" $architecture "bit architecture to" $python_home if (Test-Path $python_home) { Write-Host $python_home "already exists, skipping." return $false } if ($architecture -eq "32") { $platform_suffix = "x86" } else { $platform_suffix = "x86_64" } $filepath = DownloadMiniconda $python_version $platform_suffix Write-Host "Installing" $filepath "to" $python_home $install_log = $python_home + ".log" $args = "/S /D=$python_home" Write-Host $filepath $args Start-Process -FilePath $filepath -ArgumentList $args -Wait -Passthru if (Test-Path $python_home) { Write-Host "Python $python_version ($architecture) installation complete" } else { Write-Host "Failed to install Python in $python_home" Get-Content -Path $install_log Exit 1 } } function InstallMinicondaPip ($python_home) { $pip_path = $python_home + "\Scripts\pip.exe" $conda_path = $python_home + "\Scripts\conda.exe" if (-not(Test-Path $pip_path)) { Write-Host "Installing pip..." $args = "install --yes pip" Write-Host $conda_path $args Start-Process -FilePath "$conda_path" -ArgumentList $args -Wait -Passthru } else { Write-Host "pip already installed." } } function main () { InstallPython $env:PYTHON_VERSION $env:PYTHON_ARCH $env:PYTHON InstallPip $env:PYTHON } main Fiona-1.8.21/appveyor/run_with_env.cmd000066400000000000000000000064461420023252700177070ustar00rootroot00000000000000:: To build extensions for 64 bit Python 3, we need to configure environment :: variables to use the MSVC 2010 C++ compilers from GRMSDKX_EN_DVD.iso of: :: MS Windows SDK for Windows 7 and .NET Framework 4 (SDK v7.1) :: :: To build extensions for 64 bit Python 2, we need to configure environment :: variables to use the MSVC 2008 C++ compilers from GRMSDKX_EN_DVD.iso of: :: MS Windows SDK for Windows 7 and .NET Framework 3.5 (SDK v7.0) :: :: 32 bit builds, and 64-bit builds for 3.5 and beyond, do not require specific :: environment configurations. :: :: Note: this script needs to be run with the /E:ON and /V:ON flags for the :: cmd interpreter, at least for (SDK v7.0) :: :: More details at: :: https://github.com/cython/cython/wiki/64BitCythonExtensionsOnWindows :: http://stackoverflow.com/a/13751649/163740 :: :: Author: Olivier Grisel :: License: CC0 1.0 Universal: http://creativecommons.org/publicdomain/zero/1.0/ :: :: Notes about batch files for Python people: :: :: Quotes in values are literally part of the values: :: SET FOO="bar" :: FOO is now five characters long: " b a r " :: If you don't want quotes, don't include them on the right-hand side. :: :: The CALL lines at the end of this file look redundant, but if you move them :: outside of the IF clauses, they do not run properly in the SET_SDK_64==Y :: case, I don't know why. 
@ECHO OFF SET COMMAND_TO_RUN=%* SET WIN_SDK_ROOT=C:\Program Files\Microsoft SDKs\Windows SET WIN_WDK=c:\Program Files (x86)\Windows Kits\10\Include\wdf :: Extract the major and minor versions, and allow for the minor version to be :: more than 9. This requires the version number to have two dots in it. SET MAJOR_PYTHON_VERSION=%PYTHON_VERSION:~0,1% IF "%PYTHON_VERSION:~3,1%" == "." ( SET MINOR_PYTHON_VERSION=%PYTHON_VERSION:~2,1% ) ELSE ( SET MINOR_PYTHON_VERSION=%PYTHON_VERSION:~2,2% ) :: Based on the Python version, determine what SDK version to use, and whether :: to set the SDK for 64-bit. IF %MAJOR_PYTHON_VERSION% == 2 ( SET WINDOWS_SDK_VERSION="v7.0" SET SET_SDK_64=Y ) ELSE ( IF %MAJOR_PYTHON_VERSION% == 3 ( SET WINDOWS_SDK_VERSION="v7.1" IF %MINOR_PYTHON_VERSION% LEQ 4 ( SET SET_SDK_64=Y ) ELSE ( SET SET_SDK_64=N IF EXIST "%WIN_WDK%" ( :: See: https://connect.microsoft.com/VisualStudio/feedback/details/1610302/ REN "%WIN_WDK%" 0wdf ) ) ) ELSE ( ECHO Unsupported Python version: "%MAJOR_PYTHON_VERSION%" EXIT 1 ) ) IF %PYTHON_ARCH% == 64 ( IF %SET_SDK_64% == Y ( ECHO Configuring Windows SDK %WINDOWS_SDK_VERSION% for Python %MAJOR_PYTHON_VERSION% on a 64 bit architecture SET DISTUTILS_USE_SDK=1 SET MSSdk=1 "%WIN_SDK_ROOT%\%WINDOWS_SDK_VERSION%\Setup\WindowsSdkVer.exe" -q -version:%WINDOWS_SDK_VERSION% "%WIN_SDK_ROOT%\%WINDOWS_SDK_VERSION%\Bin\SetEnv.cmd" /x64 /release ECHO Executing: %COMMAND_TO_RUN% call %COMMAND_TO_RUN% || EXIT 1 ) ELSE ( ECHO Using default MSVC build environment for 64 bit architecture ECHO Executing: %COMMAND_TO_RUN% call %COMMAND_TO_RUN% || EXIT 1 ) ) ELSE ( ECHO Using default MSVC build environment for 32 bit architecture ECHO Executing: %COMMAND_TO_RUN% call %COMMAND_TO_RUN% || EXIT 1 ) Fiona-1.8.21/benchmark-max.py000066400000000000000000000022741420023252700157300ustar00rootroot00000000000000 import timeit from fiona import collection from osgeo import ogr PATH = 'docs/data/test_uk.shp' NAME = 'test_uk' # Fiona s = """ with collection(PATH, "r") as c: for f in c: id = f["id"] """ t = timeit.Timer( stmt=s, setup='from __main__ import collection, PATH, NAME' ) print "Fiona 0.5" print "%.2f usec/pass" % (1000000 * t.timeit(number=1000)/1000) print # OGR s = """ source = ogr.Open(PATH) layer = source.GetLayerByName(NAME) schema = [] ldefn = layer.GetLayerDefn() for n in range(ldefn.GetFieldCount()): fdefn = ldefn.GetFieldDefn(n) schema.append((fdefn.name, fdefn.type)) for feature in layer: id = feature.GetFID() props = {} for i in range(feature.GetFieldCount()): props[schema[i][0]] = feature.GetField(i) coordinates = [] for part in feature.GetGeometryRef(): ring = [] for i in range(part.GetPointCount()): xy = part.GetPoint(i) ring.append(xy) coordinates.append(ring) source.Destroy() """ print "osgeo.ogr 1.7.2 (maximum)" t = timeit.Timer( stmt=s, setup='from __main__ import ogr, PATH, NAME' ) print "%.2f usec/pass" % (1000000 * t.timeit(number=1000)/1000) Fiona-1.8.21/benchmark-min.py000066400000000000000000000013121420023252700157160ustar00rootroot00000000000000 import timeit from fiona import collection from osgeo import ogr PATH = 'docs/data/test_uk.shp' NAME = 'test_uk' # Fiona s = """ with collection(PATH, "r") as c: for f in c: id = f["id"] """ t = timeit.Timer( stmt=s, setup='from __main__ import collection, PATH, NAME' ) print "Fiona 0.5" print "%.2f usec/pass" % (1000000 * t.timeit(number=1000)/1000) print # OGR s = """ source = ogr.Open(PATH) layer = source.GetLayerByName(NAME) for feature in layer: id = feature.GetFID() source.Destroy() """ print 
"osgeo.ogr 1.7.2 (minimum)" t = timeit.Timer( stmt=s, setup='from __main__ import ogr, PATH, NAME' ) print "%.2f usec/pass" % (1000000 * t.timeit(number=1000)/1000) Fiona-1.8.21/benchmark.py000066400000000000000000000021311420023252700151350ustar00rootroot00000000000000 import timeit from fiona import collection from osgeo import ogr PATH = 'docs/data/test_uk.shp' NAME = 'test_uk' # Fiona s = """ with collection(PATH, "r") as c: for f in c: id = f["id"] """ t = timeit.Timer( stmt=s, setup='from __main__ import collection, PATH, NAME' ) print "Fiona 0.5" print "%.2f usec/pass" % (1000000 * t.timeit(number=1000)/1000) print # OGR s = """ source = ogr.Open(PATH) layer = source.GetLayerByName(NAME) schema = [] ldefn = layer.GetLayerDefn() for n in range(ldefn.GetFieldCount()): fdefn = ldefn.GetFieldDefn(n) schema.append((fdefn.name, fdefn.type)) layer.ResetReading() while 1: feature = layer.GetNextFeature() if not feature: break id = feature.GetFID() props = {} for i in range(feature.GetFieldCount()): props[schema[i][0]] = feature.GetField(i) geometry = feature.GetGeometryRef() feature.Destroy() source.Destroy() """ print "osgeo.ogr 1.7.2" t = timeit.Timer( stmt=s, setup='from __main__ import ogr, PATH, NAME' ) print "%.2f usec/pass" % (1000000 * t.timeit(number=1000)/1000) Fiona-1.8.21/docs/000077500000000000000000000000001420023252700135645ustar00rootroot00000000000000Fiona-1.8.21/docs/Makefile000066400000000000000000000130621420023252700152260ustar00rootroot00000000000000# Makefile for Sphinx documentation # # You can set these variables from the command line. SPHINXOPTS = SPHINXBUILD = sphinx-build PAPER = BUILDDIR = _build # Internal variables. PAPEROPT_a4 = -D latex_paper_size=a4 PAPEROPT_letter = -D latex_paper_size=letter ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . # the i18n builder cannot share the environment and doctrees with the others I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . .PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext help: @echo "Please use \`make ' where is one of" @echo " html to make standalone HTML files" @echo " dirhtml to make HTML files named index.html in directories" @echo " singlehtml to make a single large HTML file" @echo " pickle to make pickle files" @echo " json to make JSON files" @echo " htmlhelp to make HTML files and a HTML help project" @echo " qthelp to make HTML files and a qthelp project" @echo " devhelp to make HTML files and a Devhelp project" @echo " epub to make an epub" @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" @echo " latexpdf to make LaTeX files and run them through pdflatex" @echo " text to make text files" @echo " man to make manual pages" @echo " texinfo to make Texinfo files" @echo " info to make Texinfo files and run them through makeinfo" @echo " gettext to make PO message catalogs" @echo " changes to make an overview of all changed/added/deprecated items" @echo " linkcheck to check all external links for integrity" @echo " doctest to run all doctests embedded in the documentation (if enabled)" clean: -rm -rf $(BUILDDIR)/* html: apidocs $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html @echo @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." dirhtml: $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml @echo @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." 
singlehtml: $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml @echo @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." pickle: $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle @echo @echo "Build finished; now you can process the pickle files." json: $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json @echo @echo "Build finished; now you can process the JSON files." htmlhelp: $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp @echo @echo "Build finished; now you can run HTML Help Workshop with the" \ ".hhp project file in $(BUILDDIR)/htmlhelp." qthelp: $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp @echo @echo "Build finished; now you can run "qcollectiongenerator" with the" \ ".qhcp project file in $(BUILDDIR)/qthelp, like this:" @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/Fiona.qhcp" @echo "To view the help file:" @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/Fiona.qhc" devhelp: $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp @echo @echo "Build finished." @echo "To view the help file:" @echo "# mkdir -p $$HOME/.local/share/devhelp/Fiona" @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/Fiona" @echo "# devhelp" epub: $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub @echo @echo "Build finished. The epub file is in $(BUILDDIR)/epub." latex: $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex @echo @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." @echo "Run \`make' in that directory to run these through (pdf)latex" \ "(use \`make latexpdf' here to do that automatically)." latexpdf: $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex @echo "Running LaTeX files through pdflatex..." $(MAKE) -C $(BUILDDIR)/latex all-pdf @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." text: $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text @echo @echo "Build finished. The text files are in $(BUILDDIR)/text." man: $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man @echo @echo "Build finished. The manual pages are in $(BUILDDIR)/man." texinfo: $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo @echo @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." @echo "Run \`make' in that directory to run these through makeinfo" \ "(use \`make info' here to do that automatically)." info: $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo @echo "Running Texinfo files through makeinfo..." make -C $(BUILDDIR)/texinfo info @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." gettext: $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale @echo @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." changes: $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes @echo @echo "The overview file is in $(BUILDDIR)/changes." linkcheck: $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck @echo @echo "Link check complete; look for any errors in the above output " \ "or in $(BUILDDIR)/linkcheck/output.txt." doctest: $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest @echo "Testing of doctests in the sources finished, look at the " \ "results in $(BUILDDIR)/doctest/output.txt." apidocs: sphinx-apidoc -f -o . ../fiona @echo @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." Fiona-1.8.21/docs/README.rst000066400000000000000000000001251420023252700152510ustar00rootroot00000000000000.. include:: ../README.rst .. 
include:: ../CHANGES.txt .. include:: ../CREDITS.txt Fiona-1.8.21/docs/cli.rst000066400000000000000000000243541420023252700150750ustar00rootroot00000000000000Command Line Interface ====================== Fiona's new command line interface is a program named "fio". .. code-block:: console Usage: fio [OPTIONS] COMMAND [ARGS]... Fiona command line interface. Options: -v, --verbose Increase verbosity. -q, --quiet Decrease verbosity. --version Show the version and exit. --gdal-version Show the version and exit. --python-version Show the version and exit. --help Show this message and exit. Commands: bounds Print the extent of GeoJSON objects calc Calculate GeoJSON property by Python expression cat Concatenate and print the features of datasets collect Collect a sequence of features. distrib Distribute features from a collection. dump Dump a dataset to GeoJSON. env Print information about the fio environment. filter Filter GeoJSON features by python expression. info Print information about a dataset. insp Open a dataset and start an interpreter. load Load GeoJSON to a dataset in another format. ls List layers in a datasource. rm Remove a datasource or an individual layer. It is developed using the ``click`` package and is new in 1.1.6. bounds ------ New in 1.4.5. Fio-bounds reads LF or RS-delimited GeoJSON texts, either features or collections, from stdin and prints their bounds with or without other data to stdout. With no options, it works like this: .. code-block:: console $ fio cat docs/data/test_uk.shp | head -n 1 \ > | fio bounds [0.735, 51.357216, 0.947778, 51.444717] Using ``--with-id`` gives you .. code-block:: console $ fio cat docs/data/test_uk.shp | head -n 1 \ > | fio bounds --with-id {"id": "0", "bbox": [0.735, 51.357216, 0.947778, 51.444717]} calc ---- New in 1.7b1 The calc command creates a new property on GeoJSON features using the specified expression. The expression is evaluated in a restricted namespace containing 4 functions (`sum`, `pow`, `min`, `max`), the `math` module, the shapely `shape` function, type conversions (`bool`, `int`, `str`, `len`, `float`), and an object `f` representing the feature to be evaluated. This `f` object allows access in javascript-style dot notation for convenience. The expression will be evaluated for each feature and its return value will be added to the properties as the specified property_name. Existing properties will not be overwritten by default (an `Exception` is raised). .. code-block:: console $ fio cat data.shp | fio calc sumAB "f.properties.A + f.properties.B" cat --- The cat command concatenates the features of one or more datasets and prints them as a `JSON text sequence `__ of features. In other words: GeoJSON feature objects, possibly pretty printed, optionally separated by ASCII RS (\x1e) chars using `--rs`. The output of ``fio cat`` can be piped to ``fio load`` to create new concatenated datasets. .. code-block:: console $ fio cat docs/data/test_uk.shp docs/data/test_uk.shp \ > | fio load /tmp/double.shp --driver Shapefile $ fio info /tmp/double.shp --count 96 $ fio info docs/data/test_uk.shp --count 48 New in 1.4.0. collect ------- The collect command takes a JSON text sequence of GeoJSON feature objects, such as the output of ``fio cat`` and writes a GeoJSON feature collection. .. code-block:: console $ fio cat docs/data/test_uk.shp docs/data/test_uk.shp \ > | fio collect > /tmp/collected.json $ fio info /tmp/collected.json --count 96 New in 1.4.0. 
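For comparison, here is a minimal Python sketch of what the ``fio cat ... | fio collect`` pipeline above accomplishes. This is an illustration, not the implementation of fio-collect, and the output path is arbitrary.

.. code-block:: python

    import json

    import fiona

    # Read every feature out of the dataset and wrap the sequence in
    # a GeoJSON feature collection, as fio-collect does for a JSON
    # text sequence.
    with fiona.open('tests/data/coutwildrnp.shp') as src:
        collection = {
            'type': 'FeatureCollection',
            'features': [dict(feat) for feat in src],
        }

    # Fiona features are plain GeoJSON-like mappings, so the standard
    # json module can serialize the result directly.
    with open('/tmp/collected.json', 'w') as f:
        json.dump(collection, f)

Because features round-trip as plain JSON-compatible mappings, no special writer is needed.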
distrib ------- The inverse of fio-collect, fio-distrib takes a GeoJSON feature collection and writes a JSON text sequence of GeoJSON feature objects. .. code-block:: console $ fio info --count tests/data/coutwildrnp.shp 67 $ fio cat tests/data/coutwildrnp.shp | fio collect | fio distrib | wc -l 67 New in 1.4.0. dump ---- The dump command reads a vector dataset and writes a GeoJSON feature collection to stdout. Its output can be piped to ``fio load`` (see below). .. code-block:: console $ fio dump docs/data/test_uk.shp --indent 2 --precision 2 | head { "features": [ { "geometry": { "coordinates": [ [ [ 0.9, 51.36 ], You can optionally dump out JSON text sequences using ``--x-json-seq``. Since version 1.4.0, ``fio cat`` is the better tool for generating sequences. .. code-block:: console $ fio dump docs/data/test_uk.shp --precision 2 --x-json-seq | head -n 2 {"geometry": {"coordinates": [[[0.9, 51.36], [0.89, 51.36], [0.79, 51.37], [0.78, 51.37], [0.77, 51.38], [0.76, 51.38], [0.75, 51.39], [0.74, 51.4], [0.73, 51.41], [0.74, 51.43], [0.75, 51.44], [0.76, 51.44], [0.79, 51.44], [0.89, 51.42], [0.9, 51.42], [0.91, 51.42], [0.93, 51.4], [0.94, 51.39], [0.94, 51.38], [0.95, 51.38], [0.95, 51.37], [0.95, 51.37], [0.94, 51.37], [0.9, 51.36], [0.9, 51.36]]], "type": "Polygon"}, "id": "0", "properties": {"AREA": 244820.0, "CAT": 232.0, "CNTRY_NAME": "United Kingdom", "FIPS_CNTRY": "UK", "POP_CNTRY": 60270708.0}, "type": "Feature"} {"geometry": {"coordinates": [[[-4.66, 51.16], [-4.67, 51.16], [-4.67, 51.16], [-4.67, 51.17], [-4.67, 51.19], [-4.67, 51.19], [-4.67, 51.2], [-4.66, 51.2], [-4.66, 51.19], [-4.65, 51.16], [-4.65, 51.16], [-4.65, 51.16], [-4.66, 51.16]]], "type": "Polygon"}, "id": "1", "properties": {"AREA": 244820.0, "CAT": 232.0, "CNTRY_NAME": "United Kingdom", "FIPS_CNTRY": "UK", "POP_CNTRY": 60270708.0}, "type": "Feature"} info ---- The info command prints information about a dataset as a JSON object. .. code-block:: console $ fio info docs/data/test_uk.shp --indent 2 { "count": 48, "crs": "+datum=WGS84 +no_defs +proj=longlat", "driver": "ESRI Shapefile", "bounds": [ -8.621389, 49.911659, 1.749444, 60.844444 ], "schema": { "geometry": "Polygon", "properties": { "CAT": "float:16", "FIPS_CNTRY": "str:80", "CNTRY_NAME": "str:80", "AREA": "float:15.2", "POP_CNTRY": "float:15.2" } } } You can process this JSON using, e.g., `underscore-cli `__. .. code-block:: console $ fio info docs/data/test_uk.shp | underscore extract count 48 You can also get single info items as plain text (not JSON) strings. .. code-block:: console $ fio info docs/data/test_uk.shp --count 48 $ fio info docs/data/test_uk.shp --bounds -8.621389 49.911659 1.749444 60.844444 load ---- The load command reads GeoJSON features from stdin and writes them to a vector dataset using another format. .. code-block:: console $ fio dump docs/data/test_uk.shp \ > | fio load /tmp/test.shp --driver Shapefile This command also supports GeoJSON text sequences. RS-separated sequences will be detected. If you want to load LF-separated sequences, you must specify ``--x-json-seq``. ..
code-block:: console $ fio cat docs/data/test_uk.shp | fio load /tmp/foo.shp --driver Shapefile $ fio info /tmp/foo.shp --indent 2 { "count": 48, "crs": "+datum=WGS84 +no_defs +proj=longlat", "driver": "ESRI Shapefile", "bounds": [ -8.621389, 49.911659, 1.749444, 60.844444 ], "schema": { "geometry": "Polygon", "properties": { "AREA": "float:24.15", "CNTRY_NAME": "str:80", "POP_CNTRY": "float:24.15", "FIPS_CNTRY": "str:80", "CAT": "float:24.15" } } } The underscore-cli process command is another way of turning a GeoJSON feature collection into a feature sequence. .. code-block:: console $ fio dump docs/data/test_uk.shp \ > | underscore process \ > 'each(data.features,function(o){console.log(JSON.stringify(o))})' \ > | fio load /tmp/test-seq.shp --x-json-seq --driver Shapefile filter ------ The filter command reads GeoJSON features from stdin and writes the feature to stdout *if* the provided expression evaluates to `True` for that feature. The Python expression is evaluated in a restricted namespace containing 3 functions (`sum`, `min`, `max`), the `math` module, the shapely `shape` function, and an object `f` representing the feature to be evaluated. This `f` object allows access in javascript-style dot notation for convenience. If the expression evaluates to a "truthy" value, the feature is printed verbatim. Otherwise, the feature is excluded from the output. .. code-block:: console $ fio cat data.shp \ > | fio filter "f.properties.area > 1000.0" \ > | fio collect > large_polygons.geojson This would create a GeoJSON file with only those features from `data.shp` whose area exceeds the given threshold. rm -- The ``fio rm`` command deletes an entire datasource or a single layer in a multi-layer datasource. If the datasource is composed of multiple files (e.g. an ESRI Shapefile), all of the files will be removed. .. code-block:: console $ fio rm countries.shp $ fio rm --layer forests land_cover.gpkg New in 1.8.0. Coordinate Reference System Transformations ------------------------------------------- The ``fio cat`` command can optionally transform feature geometries to a new coordinate reference system specified with ``--dst_crs``. The ``fio collect`` command can optionally transform from a coordinate reference system specified with ``--src_crs`` to the default WGS84 GeoJSON CRS. Like collect, ``fio load`` can accept non-WGS84 features, but as it can write files in formats other than GeoJSON, you can optionally specify a ``--dst_crs``. For example, the WGS84 features read from docs/data/test_uk.shp, .. code-block:: console $ fio cat docs/data/test_uk.shp --dst_crs EPSG:3857 \ > | fio collect --src_crs EPSG:3857 > /tmp/foo.json make a detour through EPSG:3857 (Web Mercator) and are transformed back to WGS84 by fio collect. The following, .. code-block:: console $ fio cat docs/data/test_uk.shp --dst_crs EPSG:3857 \ > | fio load --src_crs EPSG:3857 --dst_crs EPSG:4326 --driver Shapefile \ > /tmp/foo.shp does the same thing, but for ESRI Shapefile output. New in 1.4.2. Fiona-1.8.21/docs/conf.py000066400000000000000000000215001420023252700150610ustar00rootroot00000000000000# -*- coding: utf-8 -*- # # Fiona documentation build configuration file, created by # sphinx-quickstart on Mon Dec 26 12:16:26 2011. # # This file is execfile()d with the current directory set to its containing dir. # # Note that not all possible configuration values are present in this # autogenerated file. # # All configuration values have a default; values that are commented out # serve to show the default.
import fiona import sys, os # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. #sys.path.insert(0, os.path.abspath('.')) # -- General configuration ----------------------------------------------------- # If your documentation needs a minimal Sphinx version, state it here. #needs_sphinx = '1.0' # Add any Sphinx extension module names here, as strings. They can be extensions # coming with Sphinx (named 'sphinx.ext.*') or your custom ones. extensions = ['sphinx.ext.autodoc'] # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] # The suffix of source filenames. source_suffix = '.rst' # The encoding of source files. #source_encoding = 'utf-8-sig' # The master toctree document. master_doc = 'index' # General information about the project. project = u'Fiona' copyright = u'2011, Sean Gillies' # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. # # The short X.Y version. version = fiona.__version__ # The full version, including alpha/beta/rc tags. release = fiona.__version__ # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. #language = None # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: #today = '' # Else, today_fmt is used as the format for a strftime call. #today_fmt = '%B %d, %Y' # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. exclude_patterns = ['_build'] # The reST default role (used for this markup: `text`) to use for all documents. #default_role = None # If true, '()' will be appended to :func: etc. cross-reference text. #add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). #add_module_names = True # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. #show_authors = False # The name of the Pygments (syntax highlighting) style to use. pygments_style = 'sphinx' # A list of ignored prefixes for module index sorting. #modindex_common_prefix = [] # -- Options for HTML output --------------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. #html_theme = 'default' html_theme = 'sphinxdoc' # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. #html_theme_options = {} # Add any paths that contain custom themes here, relative to this directory. #html_theme_path = [] # The name for this set of Sphinx documents. If None, it defaults to # " v documentation". #html_title = None # A shorter title for the navigation bar. Default is the same as html_title. #html_short_title = None # The name of an image file (relative to this directory) to place at the top # of the sidebar. #html_logo = None # The name of an image file (within the static path) to use as favicon of the # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 # pixels large. 
#html_favicon = None # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". html_static_path = ['_static'] # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, # using the given strftime format. #html_last_updated_fmt = '%b %d, %Y' # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. #html_use_smartypants = True # Custom sidebar templates, maps document names to template names. #html_sidebars = {} # Additional templates that should be rendered to pages, maps page names to # template names. #html_additional_pages = {} # If false, no module index is generated. #html_domain_indices = True # If false, no index is generated. #html_use_index = True # If true, the index is split into individual pages for each letter. #html_split_index = False # If true, links to the reST sources are added to the pages. #html_show_sourcelink = True # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. #html_show_sphinx = True # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. #html_show_copyright = True # If true, an OpenSearch description file will be output, and all pages will # contain a tag referring to it. The value of this option must be the # base URL from which the finished HTML is served. #html_use_opensearch = '' # This is the file name suffix for HTML files (e.g. ".xhtml"). #html_file_suffix = None # Output file base name for HTML help builder. htmlhelp_basename = 'Fionadoc' # -- Options for LaTeX output -------------------------------------------------- latex_elements = { # The paper size ('letterpaper' or 'a4paper'). #'papersize': 'letterpaper', # The font size ('10pt', '11pt' or '12pt'). #'pointsize': '10pt', # Additional stuff for the LaTeX preamble. #'preamble': '', } # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, author, documentclass [howto/manual]). latex_documents = [ ('index', 'Fiona.tex', u'Fiona Documentation', u'Sean Gillies', 'manual'), ] # The name of an image file (relative to this directory) to place at the top of # the title page. #latex_logo = None # For "manual" documents, if this is true, then toplevel headings are parts, # not chapters. #latex_use_parts = False # If true, show page references after internal links. #latex_show_pagerefs = False # If true, show URL addresses after external links. #latex_show_urls = False # Documents to append as an appendix to all manuals. #latex_appendices = [] # If false, no module index is generated. #latex_domain_indices = True # -- Options for manual page output -------------------------------------------- # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). man_pages = [ ('index', 'fiona', u'Fiona Documentation', [u'Sean Gillies'], 1) ] # If true, show URL addresses after external links. #man_show_urls = False # -- Options for Texinfo output ------------------------------------------------ # Grouping the document tree into Texinfo files. 
List of tuples # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ ('index', 'Fiona', u'Fiona Documentation', u'Sean Gillies', 'Fiona', 'One line description of project.', 'Miscellaneous'), ] # Documents to append as an appendix to all manuals. #texinfo_appendices = [] # If false, no module index is generated. #texinfo_domain_indices = True # How to display URL addresses: 'footnote', 'no', or 'inline'. #texinfo_show_urls = 'footnote' # -- Options for Epub output --------------------------------------------------- # Bibliographic Dublin Core info. epub_title = u'Fiona' epub_author = u'Sean Gillies' epub_publisher = u'Sean Gillies' epub_copyright = u'2011, Sean Gillies' # The language of the text. It defaults to the language option # or en if the language is not set. #epub_language = '' # The scheme of the identifier. Typical schemes are ISBN or URL. #epub_scheme = '' # The unique identifier of the text. This can be a ISBN number # or the project homepage. #epub_identifier = '' # A unique identification for the text. #epub_uid = '' # A tuple containing the cover image and cover page html template filenames. #epub_cover = () # HTML files that should be inserted before the pages created by sphinx. # The format is a list of tuples containing the path and title. #epub_pre_files = [] # HTML files shat should be inserted after the pages created by sphinx. # The format is a list of tuples containing the path and title. #epub_post_files = [] # A list of files that should not be packed into the epub file. #epub_exclude_files = [] # The depth of the table of contents in toc.ncx. #epub_tocdepth = 3 # Allow duplicate toc entries. #epub_tocdup = True Fiona-1.8.21/docs/encoding.txt000066400000000000000000000036251420023252700161210ustar00rootroot00000000000000========================= Fiona and String Encoding ========================= Reading ------- With Fiona, all 'str' type record attributes are unicode strings. The source data is encoded in some way. It might be a standard encoding (ISO-8859-1 or UTF-8) or it might be a format-specific encoding. How do we get from encoded strings to Python unicode? :: encoded File | (decode?) OGR (encode?) | (decode) Fiona E_f R E_i The internal encoding `E_i` is used by the ``FeatureBuilder`` class to create Fiona's record dicts. `E_f` is the encoding of the data file. `R` is ``True`` if OGR is recoding record attribute values to UTF-8 (a recent feature that isn't implemented for all format drivers, hence the question marks in the sketch above), else ``False``. The value of E_i is determined like this:: E_i = (R and 'utf-8') or E_f In the real world of sloppy data, we may not know the exact encoding of the data file. Fiona's best guess at it is this:: E_f = E_u or (R and E_o) or (S and 'iso-8859-1') or E_p `E_u`, here, is any encoding provided by the programmer (through the ``Collection`` constructor). `E_o` is an encoding detected by OGR (which doesn't provide an API to get the detected encoding). `S` is ``True`` if the file is a Shapefile (because that's the format default). `E_p` is locale.getpreferredencoding(). Bottom line: if you know that your data file has an encoding other than ISO-8859-1, specify it. If you don't know what the encoding is, you can let the format driver try to figure it out (Requires GDAL 1.9.1+). Writing ------- On the writing side:: Fiona (encode) | (decode?) OGR (encode?) | encoded File E_i R E_f We derive `E_i` from `R` and `E_f` again as above. 
`E_f` is:: E_f = E_u or (S and 'iso-8859-1') or E_p Appending --------- The diagram is the same as above, but `E_f` is as in the Reading section. Fiona-1.8.21/docs/fiona.fio.rst000066400000000000000000000044571420023252700162000ustar00rootroot00000000000000fiona.fio package ================= Submodules ---------- fiona.fio.bounds module ----------------------- .. automodule:: fiona.fio.bounds :members: :undoc-members: :show-inheritance: fiona.fio.calc module --------------------- .. automodule:: fiona.fio.calc :members: :undoc-members: :show-inheritance: fiona.fio.cat module -------------------- .. automodule:: fiona.fio.cat :members: :undoc-members: :show-inheritance: fiona.fio.collect module ------------------------ .. automodule:: fiona.fio.collect :members: :undoc-members: :show-inheritance: fiona.fio.distrib module ------------------------ .. automodule:: fiona.fio.distrib :members: :undoc-members: :show-inheritance: fiona.fio.dump module --------------------- .. automodule:: fiona.fio.dump :members: :undoc-members: :show-inheritance: fiona.fio.env module -------------------- .. automodule:: fiona.fio.env :members: :undoc-members: :show-inheritance: fiona.fio.filter module ----------------------- .. automodule:: fiona.fio.filter :members: :undoc-members: :show-inheritance: fiona.fio.helpers module ------------------------ .. automodule:: fiona.fio.helpers :members: :undoc-members: :show-inheritance: fiona.fio.info module --------------------- .. automodule:: fiona.fio.info :members: :undoc-members: :show-inheritance: fiona.fio.insp module --------------------- .. automodule:: fiona.fio.insp :members: :undoc-members: :show-inheritance: fiona.fio.load module --------------------- .. automodule:: fiona.fio.load :members: :undoc-members: :show-inheritance: fiona.fio.ls module ------------------- .. automodule:: fiona.fio.ls :members: :undoc-members: :show-inheritance: fiona.fio.main module --------------------- .. automodule:: fiona.fio.main :members: :undoc-members: :show-inheritance: fiona.fio.options module ------------------------ .. automodule:: fiona.fio.options :members: :undoc-members: :show-inheritance: fiona.fio.rm module ------------------- .. automodule:: fiona.fio.rm :members: :undoc-members: :show-inheritance: Module contents --------------- .. automodule:: fiona.fio :members: :undoc-members: :show-inheritance: Fiona-1.8.21/docs/fiona.rst000066400000000000000000000033771420023252700154240ustar00rootroot00000000000000fiona package ============= Subpackages ----------- .. toctree:: fiona.fio Submodules ---------- fiona.collection module ----------------------- .. automodule:: fiona.collection :members: :undoc-members: :show-inheritance: fiona.compat module ------------------- .. automodule:: fiona.compat :members: :undoc-members: :show-inheritance: fiona.crs module ---------------- .. automodule:: fiona.crs :members: :undoc-members: :show-inheritance: fiona.drvsupport module ----------------------- .. automodule:: fiona.drvsupport :members: :undoc-members: :show-inheritance: fiona.errors module ------------------- .. automodule:: fiona.errors :members: :undoc-members: :show-inheritance: fiona.inspector module ---------------------- .. automodule:: fiona.inspector :members: :undoc-members: :show-inheritance: fiona.ogrext module ------------------- .. automodule:: fiona.ogrext :members: :undoc-members: :show-inheritance: fiona.ogrext1 module -------------------- .. 
automodule:: fiona.ogrext1 :members: :undoc-members: :show-inheritance: fiona.ogrext2 module -------------------- .. automodule:: fiona.ogrext2 :members: :undoc-members: :show-inheritance: fiona.rfc3339 module -------------------- .. automodule:: fiona.rfc3339 :members: :undoc-members: :show-inheritance: fiona.tool module ----------------- .. automodule:: fiona.tool :members: :undoc-members: :show-inheritance: fiona.transform module ---------------------- .. automodule:: fiona.transform :members: :undoc-members: :show-inheritance: Module contents --------------- .. automodule:: fiona :members: :undoc-members: :show-inheritance: Fiona-1.8.21/docs/index.rst000066400000000000000000000004231420023252700154240ustar00rootroot00000000000000Fiona Documentation Contents ============================ .. toctree:: :maxdepth: 2 README User Manual API Documentation CLI Documentation Indices and tables ================== * :ref:`genindex` * :ref:`modindex` * :ref:`search` Fiona-1.8.21/docs/manual.rst000066400000000000000000001376741420023252700156150ustar00rootroot00000000000000===================== The Fiona User Manual ===================== :Author: Sean Gillies, :Version: |release| :Date: |today| :Copyright: This work is licensed under a `Creative Commons Attribution 3.0 United States License`__. .. __: http://creativecommons.org/licenses/by/3.0/us/ :Abstract: Fiona is OGR's neat, nimble, no-nonsense API. This document explains how to use the Fiona package for reading and writing geospatial data files. Python 3 is used in examples. See the `README `__ for installation and quick start instructions. .. sectnum:: Introduction ============ :dfn:`Geographic information systems` (GIS) help us plan, react to, and understand changes in our physical, political, economic, and cultural landscapes. A generation ago, GIS was something done only by major institutions like nations and cities, but it's become ubiquitous today thanks to accurate and inexpensive global positioning systems, commoditization of satellite imagery, and open source software. The kinds of data in GIS are roughly divided into :dfn:`rasters` representing continuous scalar fields (land surface temperature or elevation, for example) and :dfn:`vectors` representing discrete entities like roads and administrative boundaries. Fiona is concerned exclusively with the latter. It is a Python wrapper for vector data access functions from the `GDAL/OGR `_ library. A very simple wrapper for minimalists. It reads data records from files as GeoJSON-like mappings and writes the same kind of mappings as records back to files. That's it. There are no layers, no cursors, no geometric operations, no transformations between coordinate systems, no remote method calls; all these concerns are left to other Python packages such as :py:mod:`Shapely ` and :py:mod:`pyproj ` and Python language protocols. Why? To eliminate unnecessary complication. Fiona aims to be simple to understand and use, with no gotchas. Please understand this: Fiona is designed to excel in a certain range of tasks and is less optimal in others. Fiona trades memory and speed for simplicity and reliability. Where OGR's Python bindings (for example) use C pointers, Fiona copies vector data from the data source to Python objects. These are simpler and safer to use, but more memory intensive. 
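To make that concrete, here is a minimal sketch of the mapping-oriented access, using the :file:`test_uk.shp` sample file from the Fiona repository that is introduced below. Because the record is a full copy, it remains usable even after the collection is closed.

.. sourcecode:: python

    import fiona

    with fiona.open('docs/data/test_uk.shp') as source:
        record = next(iter(source))

    # The collection is closed now, but the record is an ordinary
    # GeoJSON-like Python mapping, copied out of the data source.
    print(record['geometry']['type'])
    print(record['properties']['CNTRY_NAME'])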
Fiona's performance is relatively slower if you only need access to a single record field – and of course if you just want to reproject or filter data files, nothing beats the :command:`ogr2ogr` program – but Fiona's performance is much better than OGR's Python bindings if you want *all* fields and coordinates of a record. The copying is a constraint, but it simplifies programs. With Fiona, you don't have to track references to C objects to avoid crashes, and you can work with vector data using familiar Python mapping accessors. Less worry, less time spent reading API documentation. Rules of Thumb -------------- In what cases would you benefit from using Fiona? * If the features of interest are from or destined for a file in a non-text format like ESRI Shapefiles, Mapinfo TAB files, etc. * If you're more interested in the values of many feature properties than in a single property's value. * If you're more interested in all the coordinate values of a feature's geometry than in a single value. * If your processing system is distributed or not confined to a single process. In what cases would you not benefit from using Fiona? * If your data is in or destined for a JSON document you should use Python's :py:mod:`json` or :py:mod:`simplejson` modules. * If your data is in an RDBMS like PostGIS, use a Python DB package or ORM like :py:mod:`SQLAlchemy` or :py:mod:`GeoAlchemy`. Maybe you're using :py:mod:`GeoDjango` already. If so, carry on. * If your data is served via HTTP from CouchDB or CartoDB, etc., use an HTTP package (:py:mod:`httplib2`, :py:mod:`Requests`, etc.) or the provider's Python API. * If you can use :command:`ogr2ogr`, do so. Example ------- The first example of using Fiona is this: copying records from one file to another, adding two attributes and making sure that all polygons are facing "up". Orientation of polygons is significant in some applications, extruded polygons in Google Earth for one. No other library (like :py:mod:`Shapely`) is needed here, which keeps it uncomplicated. There's a :file:`test_uk` file in the Fiona repository for use in this and other examples. .. sourcecode:: python import datetime import logging import sys import fiona logging.basicConfig(stream=sys.stderr, level=logging.INFO) def signed_area(coords): """Return the signed area enclosed by a ring using the linear time algorithm at http://www.cgafaq.info/wiki/Polygon_Area. A value >= 0 indicates a counter-clockwise oriented ring. """ xs, ys = map(list, zip(*coords)) xs.append(xs[1]) ys.append(ys[1]) return sum(xs[i]*(ys[i+1]-ys[i-1]) for i in range(1, len(coords)))/2.0 with fiona.open('docs/data/test_uk.shp', 'r') as source: # Copy the source schema and add two new properties. sink_schema = source.schema sink_schema['properties']['s_area'] = 'float' sink_schema['properties']['timestamp'] = 'datetime' # Create a sink for processed features with the same format and # coordinate reference system as the source. with fiona.open( 'oriented-ccw.shp', 'w', crs=source.crs, driver=source.driver, schema=sink_schema, ) as sink: for f in source: try: # If any feature's polygon is facing "down" (has rings # wound clockwise), its rings will be reordered to flip # it "up". g = f['geometry'] assert g['type'] == "Polygon" rings = g['coordinates'] sa = sum(signed_area(r) for r in rings) if sa < 0.0: rings = [r[::-1] for r in rings] g['coordinates'] = rings f['geometry'] = g # Add the signed area of the polygon and a timestamp # to the feature properties map.
f['properties'].update( s_area=sa, timestamp=datetime.datetime.now().isoformat() ) sink.write(f) except Exception as e: logging.exception("Error processing feature %s:", f['id']) # The sink file is written to disk and closed when its block ends. Data Model ========== Discrete geographic features are usually represented in geographic information systems by :dfn:`records`. The characteristics of records and their semantic implications are well known [Kent1978]_. Among those most significant for geographic data: records have a single type, all records of that type have the same fields, and a record's fields concern a single geographic feature. Different systems model records in different ways, but the various models have enough in common that programmers have been able to create useful abstract data models. The `OGR model `__ is one. Its primary entities are :dfn:`Data Sources`, :dfn:`Layers`, and :dfn:`Features`. Features have no fields; instead they have attributes and a :dfn:`Geometry`. An OGR Layer contains Features of a single type ("roads" or "wells", for example). The GeoJSON model is a bit simpler, keeping Features and substituting :dfn:`Feature Collections` for OGR Data Sources and Layers. The term "Feature" is thus overloaded in GIS modeling, denoting entities in both our conceptual and data models. Various formats for record files exist. The :dfn:`ESRI Shapefile` [ESRI1998]_ has been, at least in the United States, the most significant of these up to about 2005 and remains popular today. It is a binary format. The shape fields are stored in one .shp file and the other fields in another .dbf file. The GeoJSON [GeoJSON]_ format, from 2008, proposed a human-readable text format in which geometry and other attribute fields are encoded together using :dfn:`Javascript Object Notation` [JSON]_. In GeoJSON, there's a uniformity of data access. Attributes of features are accessed in the same manner as attributes of a feature collection. Coordinates of a geometry are accessed in the same manner as features of a collection. The GeoJSON format turns out to be a good model for a Python API. JSON objects and Python dictionaries are semantically and syntactically similar. Replacing object-oriented Layer and Feature APIs with interfaces based on Python mappings provides a uniformity of access to data and reduces the amount of time spent reading documentation. A Python programmer knows how to use a mapping, so why not treat features as dictionaries? Use of existing Python idioms is one of Fiona's major design principles. .. admonition:: TL;DR Fiona subscribes to the conventional record model of data, but provides GeoJSON-like access to the data via Python file-like and mapping protocols. Reading Vector Data =================== Reading a GIS vector file begins by opening it in mode ``'r'`` using Fiona's :py:func:`~fiona.open` function. It returns an opened :py:class:`~fiona.collection.Collection` object. .. sourcecode:: pycon >>> import fiona >>> c = fiona.open('docs/data/test_uk.shp', 'r') >>> c >>> c.closed False .. admonition:: API Change :py:func:`fiona.collection` is deprecated, but aliased to :py:func:`fiona.open` in version 0.9. Mode ``'r'`` is the default and will be omitted in following examples. Fiona's :py:class:`~fiona.collection.Collection` is like a Python :py:class:`file`, but is iterable for records rather than lines. .. sourcecode:: pycon >>> next(c) {'geometry': {'type': 'Polygon', 'coordinates': ...
>>> len(list(c)) 48 Note that :py:func:`list` iterates over the entire collection, effectively emptying it as with a Python :py:class:`file`. .. sourcecode:: pycon >>> next(c) Traceback (most recent call last): ... StopIteration >>> len(list(c)) 0 Seeking the beginning of the file is not supported. You must reopen the collection to get back to the beginning. .. sourcecode:: pycon >>> c = fiona.open('docs/data/test_uk.shp') >>> len(list(c)) 48 .. admonition:: File Encoding The format drivers will attempt to detect the encoding of your data, but may fail. In my experience GDAL 1.7.2 (for example) doesn't detect that the encoding of the Natural Earth dataset is Windows-1252. In this case, the proper encoding can be specified explicitly by using the ``encoding`` keyword parameter of :py:func:`fiona.open`: ``encoding='Windows-1252'``. New in version 0.9.1. Collection indexing ------------------- Features of a collection may also be accessed by index. .. code-block:: pycon >>> import pprint >>> with fiona.open('docs/data/test_uk.shp') as src: ... pprint.pprint(src[1]) ... {'geometry': {'coordinates': [[(-4.663611, 51.158333), (-4.669168, 51.159439), (-4.673334, 51.161385), (-4.674445, 51.165276), (-4.67139, 51.185272), (-4.669445, 51.193054), (-4.665556, 51.195), (-4.65889, 51.195), (-4.656389, 51.192215), (-4.646389, 51.164444), (-4.646945, 51.160828), (-4.651668, 51.159439), (-4.663611, 51.158333)]], 'type': 'Polygon'}, 'id': '1', 'properties': OrderedDict([(u'CAT', 232.0), (u'FIPS_CNTRY', u'UK'), (u'CNTRY_NAME', u'United Kingdom'), (u'AREA', 244820.0), (u'POP_CNTRY', 60270708.0)]), 'type': 'Feature'} Note that these indices are controlled by GDAL, and do not always follow Python conventions. They can start from 0, 1 (e.g. geopackages), or even other values, and have no guarantee of contiguity. Negative indices will only function correctly if indices start from 0 and are contiguous. New in version 1.1.6 Closing Files ------------- A :py:class:`~fiona.collection.Collection` involves external resources. There's no guarantee that these will be released unless you explicitly :py:meth:`~fiona.collection.Collection.close` the object or use a :keyword:`with` statement. When a :py:class:`~fiona.collection.Collection` is a context guard, it is closed no matter what happens within the block. .. sourcecode:: pycon >>> try: ... with fiona.open('docs/data/test_uk.shp') as c: ... print(len(list(c))) ... assert True is False ... except: ... print(c.closed) ... raise ... 48 True Traceback (most recent call last): ... AssertionError An exception is raised in the :keyword:`with` block above, but as you can see from the print statement in the :keyword:`except` clause :py:meth:`c.__exit__` (and thereby :py:meth:`c.close`) has been called. .. important:: Always call :py:meth:`~fiona.collection.Collection.close` or use :keyword:`with` and you'll never stumble over tied-up external resources, locked files, etc. Format Drivers, CRS, Bounds, and Schema ======================================= In addition to attributes like those of :py:class:`file` (:py:attr:`~file.name`, :py:attr:`~file.mode`, :py:attr:`~file.closed`), a :py:class:`~fiona.collection.Collection` has a read-only :py:attr:`~fiona.collection.Collection.driver` attribute which names the :program:`OGR` :dfn:`format driver` used to open the vector file. .. 
sourcecode:: pycon >>> c = fiona.open('docs/data/test_uk.shp') >>> c.driver 'ESRI Shapefile' The :dfn:`coordinate reference system` (CRS) of the collection's vector data is accessed via a read-only :py:attr:`~fiona.collection.Collection.crs` attribute. .. sourcecode:: pycon >>> c.crs {'no_defs': True, 'ellps': 'WGS84', 'datum': 'WGS84', 'proj': 'longlat'} The CRS is represented by a mapping of :program:`PROJ.4` parameters. The :py:mod:`fiona.crs` module provides three functions to assist with these mappings. :py:func:`~fiona.crs.to_string` converts mappings to PROJ.4 strings: .. sourcecode:: pycon >>> from fiona.crs import to_string >>> print(to_string(c.crs)) +datum=WGS84 +ellps=WGS84 +no_defs +proj=longlat :py:func:`~fiona.crs.from_string` does the inverse. .. sourcecode:: pycon >>> from fiona.crs import from_string >>> from_string("+datum=WGS84 +ellps=WGS84 +no_defs +proj=longlat") {'no_defs': True, 'ellps': 'WGS84', 'datum': 'WGS84', 'proj': 'longlat'} :py:func:`~fiona.crs.from_epsg` is a shortcut to CRS mappings from EPSG codes. .. sourcecode:: pycon >>> from fiona.crs import from_epsg >>> from_epsg(3857) {'init': 'epsg:3857', 'no_defs': True} The number of records in the collection's file can be obtained via Python's built-in :py:func:`len` function. .. sourcecode:: pycon >>> len(c) 48 The :dfn:`minimum bounding rectangle` (MBR) or :dfn:`bounds` of the collection's records is obtained via a read-only :py:attr:`~fiona.collection.Collection.bounds` attribute. .. sourcecode:: pycon >>> c.bounds (-8.621389, 49.911659, 1.749444, 60.844444) Finally, the schema of its record type (a vector file has a single type of record, remember) is accessed via a read-only :py:attr:`~fiona.collection.Collection.schema` attribute. It has 'geometry' and 'properties' items. The former is a string and the latter is an ordered dict with items having the same order as the fields in the data file. .. sourcecode:: pycon >>> import pprint >>> pprint.pprint(c.schema) {'geometry': 'Polygon', 'properties': {'CAT': 'float:16', 'FIPS_CNTRY': 'str', 'CNTRY_NAME': 'str', 'AREA': 'float:15.2', 'POP_CNTRY': 'float:15.2'}} Keeping Schemas Simple ---------------------- Fiona takes a less-is-more approach to record types and schemas. Data about record types is structured as closely to data about records as can be done. Modulo a record's 'id' key, the keys of a schema mapping are the same as the keys of the collection's record mappings. .. sourcecode:: pycon >>> rec = next(c) >>> set(rec.keys()) - set(c.schema.keys()) {'id'} >>> set(rec['properties'].keys()) == set(c.schema['properties'].keys()) True The values of the schema mapping are either additional mappings or field type names like 'Polygon', 'float', and 'str'. The corresponding Python types can be found in a dictionary named :py:attr:`fiona.FIELD_TYPES_MAP`. .. sourcecode:: pycon >>> pprint.pprint(fiona.FIELD_TYPES_MAP) {'date': <class 'fiona.rfc3339.FionaDateType'>, 'datetime': <class 'fiona.rfc3339.FionaDateTimeType'>, 'float': <class 'float'>, 'int': <class 'int'>, 'str': <class 'str'>, 'time': <class 'fiona.rfc3339.FionaTimeType'>} Field Types ----------- In a nutshell, the types and their names are as near to what you'd expect in Python (or JavaScript) as possible. The 'str' vs 'unicode' muddle is a fact of life in Python < 3.0. Fiona records have Unicode strings, but their field type name is 'str' (looking forward to Python 3). .. sourcecode:: pycon >>> type(rec['properties']['CNTRY_NAME']) <class 'str'> >>> c.schema['properties']['CNTRY_NAME'] 'str' >>> fiona.FIELD_TYPES_MAP[c.schema['properties']['CNTRY_NAME']] <class 'str'> String type fields may also indicate their maximum width.
A value of 'str:25' indicates that all values will be no longer than 25 characters. If this value is used in the schema of a file opened for writing, values of that property will be truncated at 25 characters. The default width is 80 chars, which means 'str' and 'str:80' are more or less equivalent. Fiona provides a function to get the width of a property. .. sourcecode:: pycon >>> from fiona import prop_width >>> prop_width('str:25') 25 >>> prop_width('str') 80 Another function gets the proper Python type of a property. .. sourcecode:: pycon >>> from fiona import prop_type >>> prop_type('int') <class 'int'> >>> prop_type('float') <class 'float'> >>> prop_type('str:25') <class 'str'> The example above is for Python 3. With Python 2, the type of 'str' properties is 'unicode'. .. sourcecode:: pycon >>> prop_type('str:25') <type 'unicode'> Geometry Types -------------- Fiona supports the geometry types in GeoJSON and their 3D variants. This means that the value of a schema's geometry item will be one of the following: - Point - LineString - Polygon - MultiPoint - MultiLineString - MultiPolygon - GeometryCollection - 3D Point - 3D LineString - 3D Polygon - 3D MultiPoint - 3D MultiLineString - 3D MultiPolygon - 3D GeometryCollection The last seven of these, the 3D types, apply only to collection schema. The geometry types of features are always one of the first seven. A '3D Point' collection, for example, always has features with geometry type 'Point'. The coordinates of those geometries will be (x, y, z) tuples. Note that one of the most common vector data formats, Esri's Shapefile, has no 'MultiLineString' or 'MultiPolygon' schema geometries. However, a Shapefile that indicates 'Polygon' in its schema may yield either 'Polygon' or 'MultiPolygon' features. Records ======= A record you get from a collection is a Python :py:class:`dict` structured exactly like a GeoJSON Feature. Fiona records are self-describing; the names of their fields are contained within the data structure and the values in the fields are typed properly for the type of record. Numeric field values are instances of type :py:class:`int` and :py:class:`float`, for example, not strings. .. sourcecode:: pycon >>> pprint.pprint(rec) {'geometry': {'coordinates': [[(-4.663611, 51.158333), (-4.669168, 51.159439), (-4.673334, 51.161385), (-4.674445, 51.165276), (-4.67139, 51.185272), (-4.669445, 51.193054), (-4.665556, 51.195), (-4.65889, 51.195), (-4.656389, 51.192215), (-4.646389, 51.164444), (-4.646945, 51.160828), (-4.651668, 51.159439), (-4.663611, 51.158333)]], 'type': 'Polygon'}, 'id': '1', 'properties': {'CAT': 232.0, 'FIPS_CNTRY': 'UK', 'CNTRY_NAME': 'United Kingdom', 'AREA': 244820.0, 'POP_CNTRY': 60270708.0}} The record data has no references to the :py:class:`~fiona.collection.Collection` from which it originates or to any other external resource. It's entirely independent and safe to use in any way. Closing the collection does not affect the record at all. .. sourcecode:: pycon >>> c.close() >>> rec['id'] '1' Record Id --------- A record has an ``id`` key. As in the GeoJSON specification, its corresponding value is a string unique within the data file. .. sourcecode:: pycon >>> c = fiona.open('docs/data/test_uk.shp') >>> rec = next(c) >>> rec['id'] '0' .. admonition:: OGR Details In the :program:`OGR` model, feature ids are long integers. Fiona record ids are therefore usually string representations of integer record indexes. Record Properties ----------------- A record has a ``properties`` key. Its corresponding value is a mapping: an ordered dict to be precise.
The keys of the properties mapping are the same as the keys of the properties mapping in the schema of the collection the record comes from (see above). .. sourcecode:: pycon >>> pprint.pprint(rec['properties']) {'CAT': 232.0, 'FIPS_CNTRY': 'UK', 'CNTRY_NAME': 'United Kingdom', 'AREA': 244820.0, 'POP_CNTRY': 60270708.0} Record Geometry --------------- A record has a ``geometry`` key. Its corresponding value is a mapping with ``type`` and ``coordinates`` keys. .. sourcecode:: pycon >>> pprint.pprint(rec['geometry']) {'coordinates': [[(0.899167, 51.357216), (0.885278, 51.35833), (0.7875, 51.369438), (0.781111, 51.370552), (0.766111, 51.375832), (0.759444, 51.380829), (0.745278, 51.39444), (0.740833, 51.400276), (0.735, 51.408333), (0.740556, 51.429718), (0.748889, 51.443604), (0.760278, 51.444717), (0.791111, 51.439995), (0.892222, 51.421387), (0.904167, 51.418884), (0.908889, 51.416939), (0.930555, 51.398888), (0.936667, 51.393608), (0.943889, 51.384995), (0.9475, 51.378609), (0.947778, 51.374718), (0.946944, 51.371109), (0.9425, 51.369164), (0.904722, 51.358055), (0.899167, 51.357216)]], 'type': 'Polygon'} Since the coordinates are just tuples, or lists of tuples, or lists of lists of tuples, the ``type`` tells you how to interpret them. +-------------------+---------------------------------------------------+ | Type | Coordinates | +===================+===================================================+ | Point | A single (x, y) tuple | +-------------------+---------------------------------------------------+ | LineString | A list of (x, y) tuple vertices | +-------------------+---------------------------------------------------+ | Polygon | A list of rings (each a list of (x, y) tuples) | +-------------------+---------------------------------------------------+ | MultiPoint | A list of points (each a single (x, y) tuple) | +-------------------+---------------------------------------------------+ | MultiLineString | A list of lines (each a list of (x, y) tuples) | +-------------------+---------------------------------------------------+ | MultiPolygon | A list of polygons (see above) | +-------------------+---------------------------------------------------+ Fiona, like the GeoJSON format, has both Northern Hemisphere "North is up" and Cartesian "X-Y" biases. The values within a tuple denoted as ``(x, y)`` above are either (longitude E of the prime meridian, latitude N of the equator) or, for other projected coordinate systems, (easting, northing). .. admonition:: Long-Lat, not Lat-Long Even though most of us say "lat, long" out loud, Fiona's ``x,y`` is always easting, northing, which means ``(long, lat)``. Longitude first and latitude second, consistent with the GeoJSON format specification. Point Set Theory and Simple Features ------------------------------------ In a proper, well-scrubbed vector data file the geometry mappings explained above are representations of geometric objects made up of :dfn:`point sets`. The following .. sourcecode:: python {'type': 'LineString', 'coordinates': [(0.0, 0.0), (0.0, 1.0)]} represents not just two points, but the set of infinitely many points along the line of length 1.0 from ``(0.0, 0.0)`` to ``(0.0, 1.0)``. In the application of point set theory commonly called :dfn:`Simple Features Access` [SFA]_ two geometric objects are equal if their point sets are equal whether they are equal in the Python sense or not. If you have Shapely (which implements Simple Features Access) installed, you can see this by verifying the following. ..
sourcecode:: pycon >>> from shapely.geometry import shape >>> l1 = shape( ... {'type': 'LineString', 'coordinates': [(0, 0), (2, 2)]}) >>> l2 = shape( ... {'type': 'LineString', 'coordinates': [(0, 0), (1, 1), (2, 2)]}) >>> l1 == l2 False >>> l1.equals(l2) True .. admonition:: Dirty data Some files may contain vectors that are :dfn:`invalid` from a simple features standpoint due to accident (inadequate quality control on the producer's end), intention ("dirty" vectors saved to a file for special treatment) or discrepancies of the numeric precision models (Fiona can't handle fixed precision models yet). Fiona doesn't sniff for or attempt to clean dirty data, so make sure you're getting yours from a clean source. Writing Vector Data =================== A vector file can be opened for writing in mode ``'a'`` (append) or mode ``'w'`` (write). .. admonition:: Note The in situ "update" mode of :program:`OGR` is quite format dependent and is therefore not supported by Fiona. Appending Data to Existing Files -------------------------------- Let's start with the simplest, if not most common, use case: adding new records to an existing file. The file is copied before modification and a suitable record extracted in the example below. .. sourcecode:: pycon >>> with fiona.open('docs/data/test_uk.shp') as c: ... rec = next(c) >>> rec['id'] = '-1' >>> rec['properties']['CNTRY_NAME'] = 'Gondor' >>> import os >>> os.system("cp docs/data/test_uk.* /tmp") 0 The coordinate reference system, format, and schema of the file are already defined, so it's opened with just two arguments as for reading, but in ``'a'`` mode. The new record is written to the end of the file using the :py:meth:`~fiona.collection.Collection.write` method. Accordingly, the length of the file grows from 48 to 49. .. sourcecode:: pycon >>> with fiona.open('/tmp/test_uk.shp', 'a') as c: ... print(len(c)) ... c.write(rec) ... print(len(c)) ... 48 49 The record you write must match the file's schema (because a file contains one type of record, remember). You'll get a :py:class:`ValueError` if it doesn't. .. sourcecode:: pycon >>> with fiona.open('/tmp/test_uk.shp', 'a') as c: ... c.write({'properties': {'foo': 'bar'}}) ... Traceback (most recent call last): ... ValueError: Record data not match collection schema Now, what about record ids? The id of a record written to a file is ignored and replaced by the next value appropriate for the file. If you read the file just appended to above, .. sourcecode:: pycon >>> with fiona.open('/tmp/test_uk.shp', 'a') as c: ... records = list(c) >>> records[-1]['id'] '48' >>> records[-1]['properties']['CNTRY_NAME'] 'Gondor' You'll see that the id of ``'-1'`` which the record had when written is replaced by ``'48'``. The :py:meth:`~fiona.collection.Collection.write` method writes a single record to the collection's file. Its sibling :py:meth:`~fiona.collection.Collection.writerecords` writes a sequence (or iterator) of records. .. sourcecode:: pycon >>> with fiona.open('/tmp/test_uk.shp', 'a') as c: ... c.writerecords([rec, rec, rec]) ... print(len(c)) ... 52 .. admonition:: Duplication Fiona's collections do not guard against duplication. The code above will write 3 duplicate records to the file, and they will be given unique sequential ids. .. admonition:: Buffering Fiona's output is buffered. The records passed to :py:meth:`write` and :py:meth:`writerecords` are flushed to disk when the collection is closed. You may also call :py:meth:`flush` periodically to write the buffer contents to disk.
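Because :py:meth:`~fiona.collection.Collection.writerecords` accepts any iterable, records may also be generated lazily and streamed to the file without ever being collected in a list. The following is a minimal sketch, not part of the manual's running example: it reuses the ``/tmp/test_uk.shp`` copy made above, and the ``renamed_copies`` generator with its invented country names is hypothetical.

.. sourcecode:: python

   import fiona

   def renamed_copies(template, names):
       # Yield copies of a template record, changing only CNTRY_NAME
       # so that each copy still matches the file's schema.
       for name in names:
           props = dict(template['properties'], CNTRY_NAME=name)
           yield {'geometry': template['geometry'], 'properties': props}

   with fiona.open('docs/data/test_uk.shp') as source:
       template = next(iter(source))

   with fiona.open('/tmp/test_uk.shp', 'a') as sink:
       # writerecords() drains the generator one record at a time.
       sink.writerecords(renamed_copies(template, ['Gondor', 'Rohan']))
       sink.flush()  # optional: push buffered records to disk now
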
Creating files of the same structure ------------------------------------ Writing a new file is more complex than appending to an existing file because the file CRS, format, and schema have not yet been defined and must be set by the programmer. Still, it's not very complicated. A schema is just a mapping, as described above. A CRS is also just a mapping, and the possible formats are enumerated in the :py:attr:`fiona.supported_drivers` dictionary. Review the parameters of our demo file. .. sourcecode:: pycon >>> with fiona.open('docs/data/test_uk.shp') as source: ... source_driver = source.driver ... source_crs = source.crs ... source_schema = source.schema ... >>> source_driver 'ESRI Shapefile' >>> source_crs {'no_defs': True, 'ellps': 'WGS84', 'datum': 'WGS84', 'proj': 'longlat'} >>> pprint.pprint(source_schema) {'geometry': 'Polygon', 'properties': {'CAT': 'float:16', 'FIPS_CNTRY': 'str', 'CNTRY_NAME': 'str', 'AREA': 'float:15.2', 'POP_CNTRY': 'float:15.2'}} We can create a new file using them. .. sourcecode:: pycon >>> with fiona.open( ... '/tmp/foo.shp', ... 'w', ... driver=source_driver, ... crs=source_crs, ... schema=source_schema) as c: ... print(len(c)) ... c.write(rec) ... print(len(c)) ... 0 1 >>> c.closed True >>> len(c) 1 Because the properties of the source schema are ordered and are passed in the same order to the write-mode collection, the written file's fields have the same order as those of the source file. .. sourcecode:: console $ ogrinfo /tmp/foo.shp foo -so INFO: Open of `/tmp/foo.shp' using driver `ESRI Shapefile' successful. Layer name: foo Geometry: 3D Polygon Feature Count: 1 Extent: (0.735000, 51.357216) - (0.947778, 51.444717) Layer SRS WKT: GEOGCS["GCS_WGS_1984", DATUM["WGS_1984", SPHEROID["WGS_84",6378137,298.257223563]], PRIMEM["Greenwich",0], UNIT["Degree",0.017453292519943295]] CAT: Real (16.0) FIPS_CNTRY: String (80.0) CNTRY_NAME: String (80.0) AREA: Real (15.2) POP_CNTRY: Real (15.2) The :py:attr:`~fiona.collection.Collection.meta` attribute makes duplication of a file's meta properties even easier. .. sourcecode:: pycon >>> source = fiona.open('docs/data/test_uk.shp') >>> sink = fiona.open('/tmp/foo.shp', 'w', **source.meta) Writing new files from scratch ------------------------------- To write a new file from scratch we have to define our own specific driver, crs and schema. To ensure the order of the attribute fields is predictable, in both the schema and the actual manifestation as feature attributes, we will use ordered dictionaries. .. sourcecode:: pycon >>> from collections import OrderedDict Consider the following record, structured in accordance with the `Python geo protocol <https://gist.github.com/sgillies/2217756>`__, representing the Eiffel Tower using a point geometry with UTM coordinates in zone 31N. .. sourcecode:: pycon >>> eiffel_tower = { ... 'geometry': { ... 'type': 'Point', ... 'coordinates': (448252, 5411935) ... }, ... 'properties': OrderedDict([ ... ('name', 'Eiffel Tower'), ... ('height', 300.01), ... ('view', 'scenic'), ... ('year', 1889) ... ]) ... } A corresponding schema could be: .. sourcecode:: pycon >>> landmarks_schema = { ... 'geometry': 'Point', ... 'properties': OrderedDict([ ... ('name', 'str'), ... ('height', 'float'), ... ('view', 'str'), ... ('year', 'int') ... ]) ... } The coordinate reference system of these landmark coordinates is ETRS89 / UTM zone 31N which is referenced in the EPSG database as EPSG:25831. .. sourcecode:: pycon >>> from fiona.crs import from_epsg >>> landmarks_crs = from_epsg(25831) An appropriate driver could be: ..
sourcecode:: pycon >>> output_driver = "GeoJSON" Having specified schema, crs and driver, we are ready to open a file for writing our record: .. sourcecode:: pycon >>> with fiona.open( ... '/tmp/foo.geojson', ... 'w', ... driver=output_driver, ... crs=landmarks_crs, ... schema=landmarks_schema) as c: ... c.write(eiffel_tower) ... >>> import pprint >>> with fiona.open('/tmp/foo.geojson') as source: ... for record in source: ... pprint.pprint(record) {'geometry': {'coordinates': (448252.0, 5411935.0), 'type': 'Point'}, 'id': '0', 'properties': OrderedDict([('name', 'Eiffel Tower'), ('height', 300.01), ('view', 'scenic'), ('year', 1889)]), 'type': 'Feature'} Ordering Record Fields ...................... Beginning with Fiona 1.0.1, the 'properties' item of :py:func:`fiona.open`'s 'schema' keyword argument may be an ordered dict or a list of (key, value) pairs, specifying an ordering that carries into written files. If an ordinary dict is given, the ordering is determined by the output of that dict's ``items()`` method. Note that dicts preserve insertion order only from Python 3.7 onward; under older interpreters the ordering may be arbitrary. For example, since, under CPython 2.7, .. sourcecode:: pycon >>> {'bar': 'int', 'foo': 'str'}.keys() ['foo', 'bar'] a schema of ``{'properties': {'bar': 'int', 'foo': 'str'}}`` will produce a shapefile where the first field is 'foo' and the second field is 'bar'. If you want 'bar' to be the first field, you must use a list of property items .. sourcecode:: pycon c = fiona.open( '/tmp/file.shp', 'w', schema={'properties': [('bar', 'int'), ('foo', 'str')], ...}, ... ) or an ordered dict. .. sourcecode:: pycon from collections import OrderedDict schema_props = OrderedDict([('bar', 'int'), ('foo', 'str')]) c = fiona.open( '/tmp/file.shp', 'w', schema={'properties': schema_props, ...}, ... ) Coordinates and Geometry Types ------------------------------ If you write 3D coordinates, ones having (x, y, z) tuples, to a 2D file ('Point' schema geometry, for example) the z values will be lost. If you write 2D coordinates, ones having only (x, y) tuples, to a 3D file ('3D Point' schema geometry, for example) a default z value of 0 will be provided. Advanced Topics =============== OGR configuration options ------------------------- GDAL/OGR has a large number of features that are controlled by global or thread-local configuration options. Fiona allows you to configure these options using a context manager, ``fiona.Env``. This class's constructor takes GDAL/OGR configuration options as keyword arguments. To see debugging information from GDAL/OGR, for example, you may do the following. .. sourcecode:: python import logging import fiona logging.basicConfig(level=logging.DEBUG) with fiona.Env(CPL_DEBUG=True): fiona.open('tests/data/coutwildrnp.shp') The following extra messages will appear in the Python logger's output:: DEBUG:fiona._env:CPLE_None in GNM: GNMRegisterAllInternal DEBUG:fiona._env:CPLE_None in GNM: RegisterGNMFile DEBUG:fiona._env:CPLE_None in GNM: RegisterGNMdatabase DEBUG:fiona._env:CPLE_None in GNM: GNMRegisterAllInternal DEBUG:fiona._env:CPLE_None in GNM: RegisterGNMFile DEBUG:fiona._env:CPLE_None in GNM: RegisterGNMdatabase DEBUG:fiona._env:CPLE_None in GDAL: GDALOpen(tests/data/coutwildrnp.shp, this=0x1683930) succeeds as ESRI Shapefile. If you call ``fiona.open()`` with no surrounding ``Env`` environment, one will be created for you. When your program exits the environment's ``with`` block the configuration reverts to its previous state.
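As a further illustration, here is a minimal sketch combining two standard GDAL configuration options in a single ``Env``. Any dataset opened inside the block sees both options; on exit they revert as described above.

.. sourcecode:: python

   import fiona

   with fiona.Env(CPL_DEBUG=True, GDAL_CACHEMAX=512):
       # Both options apply to everything opened inside this block.
       with fiona.open('tests/data/coutwildrnp.shp') as src:
           print(len(src))

   # Here CPL_DEBUG and GDAL_CACHEMAX have reverted to their previous
   # values.
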
Cloud storage credentials ------------------------- One of the most important uses of ``fiona.Env`` is to set credentials for accessing data stored in AWS S3 or another cloud storage system. .. sourcecode:: python from fiona.session import AWSSession import fiona with fiona.Env( session=AWSSession( aws_access_key_id="key", aws_secret_access_key="secret", ) ): fiona.open("zip+s3://example-bucket/example.zip") The AWSSession class is currently the only credential session manager in Fiona. The source code has an example of how classes for other cloud storage providers may be implemented. AWSSession relies upon boto3 and botocore, which will be installed as extra dependencies of Fiona if you run ``pip install fiona[s3]``. If you call ``fiona.open()`` with no surrounding ``Env`` and pass a path to an S3 object, a session will be created for you using code equivalent to the following. .. sourcecode:: python import boto3 from fiona.session import AWSSession import fiona with fiona.Env(session=AWSSession(boto3.Session())): fiona.open('zip+s3://fiona-testing/coutwildrnp.zip') Slicing and masking iterators ----------------------------- With some vector data formats a spatial index accompanies the data file, allowing efficient bounding box searches. A collection's :py:meth:`~fiona.collection.Collection.items` method returns an iterator over pairs of FIDs and records that intersect a given ``(minx, miny, maxx, maxy)`` bounding box or geometry object. Spatial filtering may be inaccurate, returning all features that overlap the envelope of the geometry. The collection's own coordinate reference system (see above) is used to interpret the box's values. If you want a list of the iterator's items, pass it to Python's builtin :py:func:`list` as shown below. .. sourcecode:: pycon >>> c = fiona.open('docs/data/test_uk.shp') >>> hits = list(c.items(bbox=(-5.0, 55.0, 0.0, 60.0))) >>> len(hits) 7 The iterator method takes the same ``stop`` or ``start, stop[, step]`` slicing arguments as :py:func:`itertools.islice`. To get just the first two items from that iterator, pass a stop index. .. sourcecode:: pycon >>> hits = c.items(2, bbox=(-5.0, 55.0, 0.0, 60.0)) >>> len(list(hits)) 2 To get the third through fifth items from that iterator, pass start and stop indexes. .. sourcecode:: pycon >>> hits = c.items(2, 5, bbox=(-5.0, 55.0, 0.0, 60.0)) >>> len(list(hits)) 3 To filter features by property values, use Python's builtin :py:func:`filter` and :keyword:`lambda` or your own filter function that takes a single feature record and returns ``True`` or ``False``. .. sourcecode:: pycon >>> def pass_positive_area(rec): ... return rec['properties'].get('AREA', 0.0) > 0.0 ... >>> c = fiona.open('docs/data/test_uk.shp') >>> hits = filter(pass_positive_area, c) >>> len(list(hits)) 48 Reading Multilayer data ----------------------- Up to this point, only simple datasets with one thematic layer or feature type per file have been shown and the venerable Esri Shapefile has been the primary example. Other GIS data formats can encode multiple layers or feature types within a single file or directory. Esri's `File Geodatabase <http://www.esri.com/software/arcgis/geodatabase>`__ is one example of such a format. A more useful example, for the purpose of this manual, is a directory comprising multiple shapefiles. The following three shell commands will create just such a two-layered data source from the test data distributed with Fiona. ..
sourcecode:: console $ mkdir /tmp/data $ ogr2ogr /tmp/data/ docs/data/test_uk.shp test_uk -nln foo $ ogr2ogr /tmp/data/ docs/data/test_uk.shp test_uk -nln bar The layers of a data source can be listed using :py:func:`fiona.listlayers`. In the shapefile format case, layer names match base names of the files. .. sourcecode:: pycon >>> fiona.listlayers('/tmp/data') ['bar', 'foo'] Unlike OGR, Fiona has no classes representing layers or data sources. To access the features of a layer, open a collection using the path to the data source and specify the layer by name using the `layer` keyword. .. sourcecode:: pycon >>> import pprint >>> datasrc_path = '/tmp/data' >>> for name in fiona.listlayers(datasrc_path): ... with fiona.open(datasrc_path, layer=name) as c: ... pprint.pprint(c.schema) ... {'geometry': 'Polygon', 'properties': {'CAT': 'float:16', 'FIPS_CNTRY': 'str', 'CNTRY_NAME': 'str', 'AREA': 'float:15.2', 'POP_CNTRY': 'float:15.2'}} {'geometry': 'Polygon', 'properties': {'CAT': 'float:16', 'FIPS_CNTRY': 'str', 'CNTRY_NAME': 'str', 'AREA': 'float:15.2', 'POP_CNTRY': 'float:15.2'}} Layers may also be specified by their index. .. sourcecode:: pycon >>> for i, name in enumerate(fiona.listlayers(datasrc_path)): ... with fiona.open(datasrc_path, layer=i) as c: ... print(len(c)) ... 48 48 If no layer is specified, :py:func:`fiona.open` returns an open collection using the first layer. .. sourcecode:: pycon >>> with fiona.open(datasrc_path) as c: ... c.name == fiona.listlayers(datasrc_path)[0] ... True The most general way to open a shapefile for reading, using all of the parameters of :py:func:`fiona.open`, is to treat it as a data source with a named layer. .. sourcecode:: pycon >>> fiona.open('docs/data/test_uk.shp', 'r', layer='test_uk') <open Collection 'docs/data/test_uk.shp:test_uk', mode 'r' at 0x...> In practice, it is fine to rely on the implicit first layer and default ``'r'`` mode and open a shapefile like this: .. sourcecode:: pycon >>> fiona.open('docs/data/test_uk.shp') <open Collection 'docs/data/test_uk.shp:test_uk', mode 'r' at 0x...> Writing Multilayer data ----------------------- To write an entirely new layer to a multilayer data source, simply provide a unique name to the `layer` keyword argument. .. sourcecode:: pycon >>> 'wah' not in fiona.listlayers(datasrc_path) True >>> with fiona.open(datasrc_path, layer='bar') as c: ... with fiona.open(datasrc_path, 'w', layer='wah', **c.meta) as d: ... d.write(next(c)) ... >>> fiona.listlayers(datasrc_path) ['bar', 'foo', 'wah'] In ``'w'`` mode, existing layers will be overwritten if specified, just as normal files are overwritten by Python's :py:func:`open` function. .. sourcecode:: pycon >>> 'wah' in fiona.listlayers(datasrc_path) True >>> with fiona.open(datasrc_path, layer='bar') as c: ... with fiona.open(datasrc_path, 'w', layer='wah', **c.meta) as d: ... # Overwrites the existing layer named 'wah'! ... d.write(next(c)) ... Virtual filesystems ------------------- Zip and Tar archives can be treated as virtual filesystems and collections can be made from paths and layers within them. In other words, Fiona lets you read zipped shapefiles. For example, make a Zip archive from the shapefile distributed with Fiona. .. sourcecode:: console $ zip /tmp/zed.zip docs/data/test_uk.* adding: docs/data/test_uk.shp (deflated 48%) adding: docs/data/test_uk.shx (deflated 37%) adding: docs/data/test_uk.dbf (deflated 98%) adding: docs/data/test_uk.prj (deflated 15%) The `vfs` keyword parameter for :py:func:`fiona.listlayers` and :py:func:`fiona.open` may be an Apache Commons VFS style string beginning with "zip://" or "tar://" and followed by an absolute or relative path to the archive file.
When this parameter is used, the first argument must be an absolute path within that archive. The layers in that Zip archive are: .. sourcecode:: pycon >>> import fiona >>> fiona.listlayers('/docs/data', vfs='zip:///tmp/zed.zip') ['test_uk'] The single shapefile may also be accessed like so: .. sourcecode:: pycon >>> with fiona.open( ... '/docs/data/test_uk.shp', ... vfs='zip:///tmp/zed.zip') as c: ... print(len(c)) ... 48 Unsupported drivers ------------------- :py:attr:`fiona.supported_drivers` maintains the selection of GDAL/OGR drivers that have been tested to work with Fiona. By default, Fiona allows only these drivers, with their listed access modes: ``r`` for read, ``a`` for append, and ``w`` for write. These restrictions can be circumvented by modifying :py:attr:`fiona.supported_drivers`: .. sourcecode:: python import fiona fiona.drvsupport.supported_drivers["LIBKML"] = "raw" with fiona.open("file.kmz") as collection: pass You should first verify, however, that the local installation of GDAL/OGR includes the required driver: .. sourcecode:: python from fiona.env import Env with Env() as gdalenv: print(gdalenv.drivers().keys()) Dumpgj ====== Fiona installs a script named ``dumpgj``. It converts files to GeoJSON with JSON-LD context as an option and is intended to be an upgrade to "ogr2ogr -f GeoJSON". .. sourcecode:: console $ dumpgj --help usage: dumpgj [-h] [-d] [-n N] [--compact] [--encoding ENC] [--record-buffered] [--ignore-errors] [--use-ld-context] [--add-ld-context-item TERM=URI] infile [outfile] Serialize a file's records or description to GeoJSON positional arguments: infile input file name outfile output file name, defaults to stdout if omitted optional arguments: -h, --help show this help message and exit -d, --description serialize file's data description (schema) only -n N, --indent N indentation level in N number of chars --compact use compact separators (',', ':') --encoding ENC Specify encoding of the input file --record-buffered Economical buffering of writes at record, not collection (default), level --ignore-errors log errors but do not stop serialization --use-ld-context add a JSON-LD context to JSON output --add-ld-context-item TERM=URI map a term to a URI and add it to the output's JSON LD context Final Notes =========== This manual is a work in progress and will grow and improve with Fiona. Questions and suggestions are very welcome. Please feel free to use the `issue tracker <https://github.com/Toblerity/Fiona/issues>`__ or email the author directly. Do see the `README <https://github.com/Toblerity/Fiona/blob/master/README.rst>`__ for installation instructions and information about supported versions of Python and other software dependencies. Fiona would not be possible without the `contributions of other developers <https://github.com/Toblerity/Fiona/graphs/contributors>`__, especially Frank Warmerdam and Even Rouault, the developers of GDAL/OGR; and Mike Weisman, who saved Fiona from neglect and obscurity. References ========== .. [Kent1978] William Kent, Data and Reality, North Holland, 1978. .. [ESRI1998] ESRI Shapefile Technical Description. July 1998. http://www.esri.com/library/whitepapers/pdfs/shapefile.pdf .. [GeoJSON] http://geojson.org .. [JSON] http://www.ietf.org/rfc/rfc4627 .. [SFA] http://en.wikipedia.org/wiki/Simple_feature_access Fiona-1.8.21/docs/modules.rst000066400000000000000000000000641420023252700157660ustar00rootroot00000000000000fiona ===== ..
toctree:: :maxdepth: 4 fiona Fiona-1.8.21/environment.yml000066400000000000000000000001371420023252700157240ustar00rootroot00000000000000name: _fiona channels: - defaults - conda-forge dependencies: - python>=3.5 - cython - libgdal Fiona-1.8.21/examples/000077500000000000000000000000001420023252700144525ustar00rootroot00000000000000Fiona-1.8.21/examples/open.py000066400000000000000000000051111420023252700157630ustar00rootroot00000000000000 import fiona # This module contains examples of opening files to get feature collections in # different ways. # # It is meant to be run from the distribution root, the directory containing # setup.py. # # A ``path`` is always the ``open()`` function's first argument. It can be # absolute or relative to the working directory. It is the only positional # argument, though it is conventional to use the mode as a 2nd positional # argument. # 1. Opening a file with a single data layer (shapefiles, etc). # # args: path, mode # kwds: none # # The relative path to a file on the filesystem is given and its single layer # is selected implicitly (a shapefile has a single layer). The file is opened # for reading (mode 'r'), but since this is the default, we'll omit it in # following examples. with fiona.open('docs/data/test_uk.shp', 'r') as c: assert len(c) == 48 # 2. Opening a file with explicit layer selection (FileGDB, etc). # # args: path # kwds: layer # # Same as above but layer specified explicitly by name.. with fiona.open('docs/data/test_uk.shp', layer='test_uk') as c: assert len(c) == 48 # 3. Opening a directory for access to a single file. # # args: path # kwds: layer # # Same as above but using the path to the directory containing the shapefile, # specified explicitly by name. with fiona.open('docs/data', layer='test_uk') as c: assert len(c) == 48 # 4. Opening a single file within a zip archive. # # args: path # kwds: vfs # # Open a file given its absolute path within a virtual filesystem. The VFS # is given an Apache Commons VFS identifier. It may contain either an absolute # path or a path relative to the working directory. # # Example archive: # # $ unzip -l docs/data/test_uk.zip # Archive: docs/data/test_uk.zip # Length Date Time Name # -------- ---- ---- ---- # 10129 04-08-13 20:49 test_uk.dbf # 143 04-08-13 20:49 test_uk.prj # 65156 04-08-13 20:49 test_uk.shp # 484 04-08-13 20:49 test_uk.shx # -------- ------- # 75912 4 files with fiona.open('/test_uk.shp', vfs='zip://docs/data/test_uk.zip') as c: assert len(c) == 48 # 5. Opening a directory within a zip archive to select a layer. # # args: path # kwds: layer, vfs # # The most complicated case. As above, but specifying the root directory within # the virtual filesystem as the path and the layer by name (combination of # 4 and 3). It ought to be possible to open a file geodatabase within a zip # file like this. with fiona.open('/', layer='test_uk', vfs='zip://docs/data/test_uk.zip') as c: assert len(c) == 48 Fiona-1.8.21/examples/orient-ccw.py000066400000000000000000000040421420023252700170760ustar00rootroot00000000000000# An example of flipping feature polygons right side up. import datetime import logging import sys import fiona logging.basicConfig(stream=sys.stderr, level=logging.INFO) def signed_area(coords): """Return the signed area enclosed by a ring using the linear time algorithm at http://www.cgafaq.info/wiki/Polygon_Area. A value >= 0 indicates a counter-clockwise oriented ring. 
""" xs, ys = map(list, zip(*coords)) xs.append(xs[1]) ys.append(ys[1]) return sum(xs[i]*(ys[i+1]-ys[i-1]) for i in range(1, len(coords)))/2.0 with fiona.open('docs/data/test_uk.shp', 'r') as source: # Copy the source schema and add two new properties. schema = source.schema.copy() schema['properties']['s_area'] = 'float' schema['properties']['timestamp'] = 'str' # Create a sink for processed features with the same format and # coordinate reference system as the source. with fiona.open( 'oriented-ccw.shp', 'w', driver=source.driver, schema=schema, crs=source.crs ) as sink: for f in source: try: # If any feature's polygon is facing "down" (has rings # wound clockwise), its rings will be reordered to flip # it "up". g = f['geometry'] assert g['type'] == 'Polygon' rings = g['coordinates'] sa = sum(signed_area(r) for r in rings) if sa < 0.0: rings = [r[::-1] for r in rings] g['coordinates'] = rings f['geometry'] = g # Add the signed area of the polygon and a timestamp # to the feature properties map. f['properties'].update( s_area=sa, timestamp=datetime.datetime.now().isoformat() ) sink.write(f) except Exception, e: logging.exception("Error processing feature %s:", f['id']) Fiona-1.8.21/examples/with-descartes-functional.py000066400000000000000000000011571420023252700221160ustar00rootroot00000000000000# Making maps with reduce() from matplotlib import pyplot from descartes import PolygonPatch import fiona BLUE = '#6699cc' def render(fig, rec): """Given matplotlib axes and a record, adds the record as a patch and returns the axes so that reduce() can accumulate more patches.""" fig.gca().add_patch( PolygonPatch(rec['geometry'], fc=BLUE, ec=BLUE, alpha=0.5, zorder=2)) return fig with fiona.open('docs/data/test_uk.shp', 'r') as source: fig = reduce(render, source, pyplot.figure(figsize=(8, 8))) fig.gca().autoscale(tight=False) fig.savefig('with-descartes-functional.png') Fiona-1.8.21/examples/with-descartes.py000066400000000000000000000013331420023252700177520ustar00rootroot00000000000000 import subprocess from matplotlib import pyplot from descartes import PolygonPatch import fiona # Set up the figure and axes. BLUE = '#6699cc' fig = pyplot.figure(1, figsize=(6, 6), dpi=90) ax = fig.add_subplot(111) with fiona.drivers(): # For each feature in the collection, add a patch to the axes. with fiona.open('docs/data/test_uk.shp', 'r') as input: for f in input: ax.add_patch( PolygonPatch( f['geometry'], fc=BLUE, ec=BLUE, alpha=0.5, zorder=2 )) # Should be able to get extents from the collection in a future version # of Fiona. 
ax.set_xlim(-9.25, 2.75) ax.set_ylim(49.5, 61.5) fig.savefig('test_uk.png') subprocess.call(['open', 'test_uk.png']) Fiona-1.8.21/examples/with-pyproj.py000066400000000000000000000022111420023252700173160ustar00rootroot00000000000000 import logging import sys from pyproj import Proj, transform import fiona from fiona.crs import from_epsg logging.basicConfig(stream=sys.stderr, level=logging.INFO) with fiona.open('docs/data/test_uk.shp', 'r') as source: sink_schema = source.schema.copy() p_in = Proj(source.crs) with fiona.open( 'with-pyproj.shp', 'w', crs=from_epsg(27700), driver=source.driver, schema=sink_schema, ) as sink: p_out = Proj(sink.crs) for f in source: try: assert f['geometry']['type'] == "Polygon" new_coords = [] for ring in f['geometry']['coordinates']: x2, y2 = transform(p_in, p_out, *zip(*ring)) new_coords.append(list(zip(x2, y2))) f['geometry']['coordinates'] = new_coords sink.write(f) except Exception as e: # Writing uncleanable features to a different shapefile # is another option. logging.exception("Error transforming feature %s:", f['id']) Fiona-1.8.21/examples/with-shapely.py000066400000000000000000000020331420023252700174400ustar00rootroot00000000000000 import logging import sys from shapely.geometry import mapping, shape import fiona logging.basicConfig(stream=sys.stderr, level=logging.INFO) with fiona.open('docs/data/test_uk.shp', 'r') as source: # **source.meta is a shortcut to get the crs, driver, and schema # keyword arguments from the source Collection. with fiona.open( 'with-shapely.shp', 'w', **source.meta) as sink: for f in source: try: geom = shape(f['geometry']) if not geom.is_valid: clean = geom.buffer(0.0) assert clean.is_valid assert clean.geom_type == 'Polygon' geom = clean f['geometry'] = mapping(geom) sink.write(f) except Exception as e: # Writing uncleanable features to a different shapefile # is another option. logging.exception("Error cleaning feature %s:", f['id']) Fiona-1.8.21/fiona/000077500000000000000000000000001420023252700137305ustar00rootroot00000000000000Fiona-1.8.21/fiona/__init__.py000066400000000000000000000327511420023252700160510ustar00rootroot00000000000000# -*- coding: utf-8 -*- """ Fiona is OGR's neat, nimble API. Fiona provides a minimal, uncomplicated Python interface to the open source GIS community's most trusted geodata access library and integrates readily with other Python GIS packages such as pyproj, Rtree and Shapely. How minimal? Fiona can read features as mappings from shapefiles or other GIS vector formats and write mappings as features to files using the same formats. That's all. There aren't any feature or geometry classes. Features and their geometries are just data. A Fiona feature is a Python mapping inspired by the GeoJSON format. It has `id`, 'geometry`, and `properties` keys. The value of `id` is a string identifier unique within the feature's parent collection. The `geometry` is another mapping with `type` and `coordinates` keys. The `properties` of a feature is another mapping corresponding to its attribute table. For example: {'id': '1', 'geometry': {'type': 'Point', 'coordinates': (0.0, 0.0)}, 'properties': {'label': u'Null Island'} } is a Fiona feature with a point geometry and one property. Features are read and written using objects returned by the ``collection`` function. These ``Collection`` objects are a lot like Python ``file`` objects. A ``Collection`` opened in reading mode serves as an iterator over features. One opened in a writing mode provides a ``write`` method.
Usage ----- Here's an example of reading a select few polygon features from a shapefile and for each, picking off the first vertex of the exterior ring of the polygon and using that as the point geometry for a new feature writing to a "points.shp" file. >>> import fiona >>> with fiona.open('docs/data/test_uk.shp', 'r') as inp: ... output_schema = inp.schema.copy() ... output_schema['geometry'] = 'Point' ... with collection( ... "points.shp", "w", ... crs=inp.crs, ... driver="ESRI Shapefile", ... schema=output_schema ... ) as out: ... for f in inp.filter( ... bbox=(-5.0, 55.0, 0.0, 60.0) ... ): ... value = f['geometry']['coordinates'][0][0] ... f['geometry'] = { ... 'type': 'Point', 'coordinates': value} ... out.write(f) Because Fiona collections are context managers, they are closed and (in writing modes) flush contents to disk when their ``with`` blocks end. """ from contextlib import contextmanager import logging import os import sys import warnings import platform from six import string_types try: from pathlib import Path except ImportError: # pragma: no cover class Path: pass # TODO: remove this? Or at least move it, flake8 complains. if sys.platform == "win32": libdir = os.path.join(os.path.dirname(__file__), ".libs") os.environ["PATH"] = os.environ["PATH"] + ";" + libdir import fiona._loading with fiona._loading.add_gdal_dll_directories(): from fiona.collection import BytesCollection, Collection from fiona.drvsupport import supported_drivers from fiona.env import ensure_env_with_credentials, Env from fiona.errors import FionaDeprecationWarning from fiona._env import driver_count from fiona._env import ( calc_gdal_version_num, get_gdal_version_num, get_gdal_release_name, get_gdal_version_tuple) from fiona.compat import OrderedDict from fiona.io import MemoryFile from fiona.ogrext import _bounds, _listlayers, FIELD_TYPES_MAP, _remove, _remove_layer from fiona.path import ParsedPath, parse_path, vsi_path from fiona.vfs import parse_paths as vfs_parse_paths from fiona._show_versions import show_versions # These modules are imported by fiona.ogrext, but are also import here to # help tools like cx_Freeze find them automatically from fiona import _geometry, _err, rfc3339 import uuid __all__ = ['bounds', 'listlayers', 'open', 'prop_type', 'prop_width'] __version__ = "1.8.21" __gdal_version__ = get_gdal_release_name() gdal_version = get_gdal_version_tuple() log = logging.getLogger(__name__) log.addHandler(logging.NullHandler()) @ensure_env_with_credentials def open(fp, mode='r', driver=None, schema=None, crs=None, encoding=None, layer=None, vfs=None, enabled_drivers=None, crs_wkt=None, **kwargs): """Open a collection for read, append, or write In write mode, a driver name such as "ESRI Shapefile" or "GPX" (see OGR docs or ``ogr2ogr --help`` on the command line) and a schema mapping such as: {'geometry': 'Point', 'properties': [('class', 'int'), ('label', 'str'), ('value', 'float')]} must be provided. If a particular ordering of properties ("fields" in GIS parlance) in the written file is desired, a list of (key, value) pairs as above or an ordered dict is required. If no ordering is needed, a standard dict will suffice. A coordinate reference system for collections in write mode can be defined by the ``crs`` parameter. It takes Proj4 style mappings like {'proj': 'longlat', 'ellps': 'WGS84', 'datum': 'WGS84', 'no_defs': True} short hand strings like EPSG:4326 or WKT representations of coordinate reference systems. The drivers used by Fiona will try to detect the encoding of data files. 
If they fail, you may provide the proper ``encoding``, such as 'Windows-1252' for the Natural Earth datasets. When the provided path is to a file containing multiple named layers of data, a layer can be singled out by ``layer``. The drivers enabled for opening datasets may be restricted to those listed in the ``enabled_drivers`` parameter. This and the ``driver`` parameter afford much control over opening of files. # Trying only the GeoJSON driver when opening to read, the # following raises ``DataIOError``: fiona.open('example.shp', driver='GeoJSON') # Trying first the GeoJSON driver, then the Shapefile driver, # the following succeeds: fiona.open( 'example.shp', enabled_drivers=['GeoJSON', 'ESRI Shapefile']) Parameters ---------- fp : URI (str or pathlib.Path), or file-like object A dataset resource identifier or file object. mode : str One of 'r', to read (the default); 'a', to append; or 'w', to write. driver : str In 'w' mode a format driver name is required. In 'r' or 'a' mode this parameter has no effect. schema : dict Required in 'w' mode, has no effect in 'r' or 'a' mode. crs : str or dict Required in 'w' mode, has no effect in 'r' or 'a' mode. encoding : str Name of the encoding used to encode or decode the dataset. layer : int or str The integer index or name of a layer in a multi-layer dataset. vfs : str This is a deprecated parameter. A URI scheme such as "zip://" should be used instead. enabled_drivers : list An optional list of driver names to use when opening a collection. crs_wkt : str An optional WKT representation of a coordinate reference system. kwargs : mapping Other driver-specific parameters that will be interpreted by the OGR library as layer creation or opening options. Returns ------- Collection """ if mode == 'r' and hasattr(fp, 'read'): @contextmanager def fp_reader(fp): memfile = MemoryFile(fp.read()) dataset = memfile.open( driver=driver, crs=crs, schema=schema, layer=layer, encoding=encoding, enabled_drivers=enabled_drivers, **kwargs) try: yield dataset finally: dataset.close() memfile.close() return fp_reader(fp) elif mode == 'w' and hasattr(fp, 'write'): if schema: # Make an ordered dict of schema properties. this_schema = schema.copy() this_schema['properties'] = OrderedDict(schema['properties']) else: this_schema = None @contextmanager def fp_writer(fp): memfile = MemoryFile() dataset = memfile.open( driver=driver, crs=crs, schema=this_schema, layer=layer, encoding=encoding, enabled_drivers=enabled_drivers, crs_wkt=crs_wkt, **kwargs) try: yield dataset finally: dataset.close() memfile.seek(0) fp.write(memfile.read()) memfile.close() return fp_writer(fp) elif mode == "a" and hasattr(fp, "write"): raise OSError( "Append mode is not supported for datasets in a Python file object." ) else: # If a pathlib.Path instance is given, convert it to a string path. if isinstance(fp, Path): fp = str(fp) if vfs: warnings.warn("The vfs keyword argument is deprecated. Instead, pass a URL that uses a zip or tar (for example) scheme.", FionaDeprecationWarning, stacklevel=2) path, scheme, archive = vfs_parse_paths(fp, vfs=vfs) path = ParsedPath(path, archive, scheme) else: path = parse_path(fp) if mode in ('a', 'r'): c = Collection(path, mode, driver=driver, encoding=encoding, layer=layer, enabled_drivers=enabled_drivers, **kwargs) elif mode == 'w': if schema: # Make an ordered dict of schema properties.
this_schema = schema.copy() if 'properties' in schema: this_schema['properties'] = OrderedDict(schema['properties']) else: this_schema['properties'] = OrderedDict() if 'geometry' not in this_schema: this_schema['geometry'] = None else: this_schema = None c = Collection(path, mode, crs=crs, driver=driver, schema=this_schema, encoding=encoding, layer=layer, enabled_drivers=enabled_drivers, crs_wkt=crs_wkt, **kwargs) else: raise ValueError( "mode string must be one of 'r', 'w', or 'a', not %s" % mode) return c collection = open def remove(path_or_collection, driver=None, layer=None): """Deletes an OGR data source The required ``path`` argument may be an absolute or relative file path. Alternatively, a Collection can be passed instead in which case the path and driver are automatically determined. Otherwise the ``driver`` argument must be specified. Raises a ``RuntimeError`` if the data source cannot be deleted. Example usage: fiona.remove('test.shp', 'ESRI Shapefile') """ if isinstance(path_or_collection, Collection): collection = path_or_collection path = collection.path driver = collection.driver collection.close() else: path = path_or_collection if layer is None: _remove(path, driver) else: _remove_layer(path, layer, driver) @ensure_env_with_credentials def listlayers(fp, vfs=None): """List layer names in their index order Parameters ---------- fp : URI (str or pathlib.Path), or file-like object A dataset resource identifier or file object. vfs : str This is a deprecated parameter. A URI scheme such as "zip://" should be used instead. Returns ------- list A list of layer name strings. """ if hasattr(fp, 'read'): with MemoryFile(fp.read()) as memfile: return _listlayers(memfile.name) else: if isinstance(fp, Path): fp = str(fp) if not isinstance(fp, string_types): raise TypeError("invalid path: %r" % fp) if vfs and not isinstance(vfs, string_types): raise TypeError("invalid vfs: %r" % vfs) if vfs: warnings.warn("The vfs keyword argument is deprecated. Instead, pass a URL that uses a zip or tar (for example) scheme.", FionaDeprecationWarning, stacklevel=2) pobj_vfs = parse_path(vfs) pobj_path = parse_path(fp) pobj = ParsedPath(pobj_path.path, pobj_vfs.path, pobj_vfs.scheme) else: pobj = parse_path(fp) return _listlayers(vsi_path(pobj)) def prop_width(val): """Returns the width of a str type property. Undefined for non-str properties. Example: >>> prop_width('str:25') 25 >>> prop_width('str') 80 """ if val.startswith('str'): return int((val.split(":")[1:] or ["80"])[0]) return None def prop_type(text): """Returns a schema property's proper Python type. Example: >>> prop_type('int') <class 'int'> >>> prop_type('str:25') <class 'str'> """ key = text.split(':')[0] return FIELD_TYPES_MAP[key] def drivers(*args, **kwargs): """Returns a context manager with registered drivers. DEPRECATED """ warnings.warn("Use fiona.Env() instead.", FionaDeprecationWarning, stacklevel=2) if driver_count == 0: log.debug("Creating a chief GDALEnv in drivers()") return Env(**kwargs) else: log.debug("Creating a not-responsible GDALEnv in drivers()") return Env(**kwargs) def bounds(ob): """Returns a (minx, miny, maxx, maxy) bounding box. The ``ob`` may be a feature record or geometry.""" geom = ob.get('geometry') or ob return _bounds(geom) Fiona-1.8.21/fiona/_cpl.pxd000066400000000000000000000013351420023252700153640ustar00rootroot00000000000000# Cross-platform API functions.
cdef extern from "cpl_conv.h": void * CPLMalloc (size_t) void CPLFree (void *ptr) void CPLSetThreadLocalConfigOption (char *key, char *val) const char *CPLGetConfigOption (char *, char *) cdef extern from "cpl_vsi.h": ctypedef struct VSILFILE: pass int VSIFCloseL (VSILFILE *) VSILFILE * VSIFileFromMemBuffer (const char * filename, unsigned char * data, int data_len, int take_ownership) int VSIUnlink (const char * pathname) ctypedef int OGRErr ctypedef struct OGREnvelope: double MinX double MaxX double MinY double MaxY Fiona-1.8.21/fiona/_crs.pxd000066400000000000000000000021631420023252700153750ustar00rootroot00000000000000# Coordinate system and transform API functions. cdef extern from "ogr_srs_api.h": ctypedef void * OGRSpatialReferenceH void OSRCleanup () OGRSpatialReferenceH OSRClone (OGRSpatialReferenceH srs) int OSRExportToProj4 (OGRSpatialReferenceH srs, char **params) int OSRExportToWkt (OGRSpatialReferenceH srs, char **params) int OSRImportFromEPSG (OGRSpatialReferenceH srs, int code) int OSRImportFromProj4 (OGRSpatialReferenceH srs, char *proj) int OSRSetFromUserInput (OGRSpatialReferenceH srs, char *input) int OSRAutoIdentifyEPSG (OGRSpatialReferenceH srs) const char * OSRGetAuthorityName (OGRSpatialReferenceH srs, const char *key) const char * OSRGetAuthorityCode (OGRSpatialReferenceH srs, const char *key) OGRSpatialReferenceH OSRNewSpatialReference (char *wkt) void OSRRelease (OGRSpatialReferenceH srs) void * OCTNewCoordinateTransformation (OGRSpatialReferenceH source, OGRSpatialReferenceH dest) void OCTDestroyCoordinateTransformation (void *source) int OCTTransform (void *ct, int nCount, double *x, double *y, double *z) Fiona-1.8.21/fiona/_crs.pyx000066400000000000000000000044501420023252700154230ustar00rootroot00000000000000"""Extension module supporting crs.py. Calls methods from GDAL's OSR module. """ from __future__ import absolute_import import logging from six import string_types from fiona cimport _cpl from fiona._err cimport exc_wrap_pointer from fiona._err import CPLE_BaseError from fiona._shim cimport osr_get_name, osr_set_traditional_axis_mapping_strategy from fiona.compat import DICT_TYPES from fiona.errors import CRSError logger = logging.getLogger(__name__) cdef int OAMS_TRADITIONAL_GIS_ORDER = 0 # Export a WKT string from input crs. def crs_to_wkt(crs): """Convert a Fiona CRS object to WKT format""" cdef OGRSpatialReferenceH cogr_srs = NULL cdef char *proj_c = NULL try: cogr_srs = exc_wrap_pointer(OSRNewSpatialReference(NULL)) except CPLE_BaseError as exc: raise CRSError(u"{}".format(exc)) # First, check for CRS strings like "EPSG:3857". if isinstance(crs, string_types): proj_b = crs.encode('utf-8') proj_c = proj_b OSRSetFromUserInput(cogr_srs, proj_c) elif isinstance(crs, DICT_TYPES): # EPSG is a special case. 
init = crs.get('init') if init: logger.debug("Init: %s", init) auth, val = init.split(':') if auth.upper() == 'EPSG': logger.debug("Setting EPSG: %s", val) OSRImportFromEPSG(cogr_srs, int(val)) else: params = [] crs['wktext'] = True for k, v in crs.items(): if v is True or (k in ('no_defs', 'wktext') and v): params.append("+%s" % k) else: params.append("+%s=%s" % (k, v)) proj = " ".join(params) logger.debug("PROJ.4 to be imported: %r", proj) proj_b = proj.encode('utf-8') proj_c = proj_b OSRImportFromProj4(cogr_srs, proj_c) else: raise CRSError("Invalid input to create CRS: {}".format(crs)) osr_set_traditional_axis_mapping_strategy(cogr_srs) OSRExportToWkt(cogr_srs, &proj_c) if proj_c == NULL: raise CRSError("Invalid input to create CRS: {}".format(crs)) proj_b = proj_c _cpl.CPLFree(proj_c) if not proj_b: raise CRSError("Invalid input to create CRS: {}".format(crs)) return proj_b.decode('utf-8') Fiona-1.8.21/fiona/_csl.pxd000066400000000000000000000003451420023252700153670ustar00rootroot00000000000000# String API functions. cdef extern from "cpl_string.h": char ** CSLAddNameValue (char **list, char *name, char *value) char ** CSLSetNameValue (char **list, char *name, char *value) void CSLDestroy (char **list) Fiona-1.8.21/fiona/_env.pxd000066400000000000000000000002161420023252700153730ustar00rootroot00000000000000cdef class ConfigEnv(object): cdef public object options cdef class GDALEnv(ConfigEnv): cdef public object _have_registered_drivers Fiona-1.8.21/fiona/_env.pyx000066400000000000000000000353521420023252700154310ustar00rootroot00000000000000# cython: c_string_type=unicode, c_string_encoding=utf8 """GDAL and OGR driver and configuration management The main thread always utilizes CPLSetConfigOption. Child threads utilize CPLSetThreadLocalConfigOption instead. All threads use CPLGetConfigOption and not CPLGetThreadLocalConfigOption, thus child threads will inherit config options from the main thread unless the option is set to a new value inside the thread. """ include "gdal.pxi" from collections import namedtuple import logging import os import os.path import sys import threading from fiona._err cimport exc_wrap_int, exc_wrap_ogrerr from fiona._shim cimport set_proj_search_path, get_proj_version from fiona._err import CPLE_BaseError from fiona.errors import EnvError level_map = { 0: 0, 1: logging.DEBUG, 2: logging.WARNING, 3: logging.ERROR, 4: logging.CRITICAL } code_map = { 0: 'CPLE_None', 1: 'CPLE_AppDefined', 2: 'CPLE_OutOfMemory', 3: 'CPLE_FileIO', 4: 'CPLE_OpenFailed', 5: 'CPLE_IllegalArg', 6: 'CPLE_NotSupported', 7: 'CPLE_AssertionFailed', 8: 'CPLE_NoWriteAccess', 9: 'CPLE_UserInterrupt', 10: 'ObjectNull', # error numbers 11-16 are introduced in GDAL 2.1. See # https://github.com/OSGeo/gdal/pull/98. 
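# On builds against older GDAL these codes should not appear; log_error()
# below logs "Unknown error number" for any code not in this map.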
11: 'CPLE_HttpResponse', 12: 'CPLE_AWSBucketNotFound', 13: 'CPLE_AWSObjectNotFound', 14: 'CPLE_AWSAccessDenied', 15: 'CPLE_AWSInvalidCredentials', 16: 'CPLE_AWSSignatureDoesNotMatch'} log = logging.getLogger(__name__) try: import certifi os.environ.setdefault("CURL_CA_BUNDLE", certifi.where()) except ImportError: pass cdef bint is_64bit = sys.maxsize > 2 ** 32 cdef _safe_osr_release(OGRSpatialReferenceH srs): """Wrapper to handle OSR release when NULL.""" if srs != NULL: OSRRelease(srs) srs = NULL def calc_gdal_version_num(maj, min, rev): """Calculates the internal gdal version number based on major, minor and revision GDAL Version Information macro changed with GDAL version 1.10.0 (April 2013) """ if (maj, min, rev) >= (1, 10, 0): return int(maj * 1000000 + min * 10000 + rev * 100) else: return int(maj * 1000 + min * 100 + rev * 10) def get_gdal_version_num(): """Return current internal version number of gdal""" return int(GDALVersionInfo("VERSION_NUM")) def get_gdal_release_name(): """Return release name of gdal""" cdef const char *name_c = NULL name_c = GDALVersionInfo("RELEASE_NAME") name = name_c return name GDALVersion = namedtuple("GDALVersion", ["major", "minor", "revision"]) def get_gdal_version_tuple(): """ Calculates gdal version tuple from gdal's internal version number. GDAL Version Information macro changed with GDAL version 1.10.0 (April 2013) """ gdal_version_num = get_gdal_version_num() if gdal_version_num >= calc_gdal_version_num(1, 10, 0): major = gdal_version_num // 1000000 minor = (gdal_version_num - (major * 1000000)) // 10000 revision = (gdal_version_num - (major * 1000000) - (minor * 10000)) // 100 return GDALVersion(major, minor, revision) else: major = gdal_version_num // 1000 minor = (gdal_version_num - (major * 1000)) // 100 revision = (gdal_version_num - (major * 1000) - (minor * 100)) // 10 return GDALVersion(major, minor, revision) def get_proj_version_tuple(): """ Returns proj version tuple for gdal >= 3.0.1, otherwise None """ cdef int major cdef int minor cdef int patch gdal_version_num = get_gdal_version_num() if gdal_version_num < calc_gdal_version_num(3, 0, 1): proj_version = None else: get_proj_version(&major, &minor, &patch) return (major, minor, patch) cdef void log_error(CPLErr err_class, int err_no, const char* msg) with gil: """Send CPL debug messages and warnings to Python's logger.""" log = logging.getLogger(__name__) if err_no in code_map: log.log(level_map[err_class], "%s", msg) else: log.info("Unknown error number %r.", err_no) # Definition of GDAL callback functions, one for Windows and one for # other platforms. Each calls log_error(). IF UNAME_SYSNAME == "Windows": cdef void __stdcall logging_error_handler(CPLErr err_class, int err_no, const char* msg) with gil: log_error(err_class, err_no, msg) ELSE: cdef void logging_error_handler(CPLErr err_class, int err_no, const char* msg) with gil: log_error(err_class, err_no, msg) def driver_count(): """Return the count of all drivers""" return GDALGetDriverCount() + OGRGetDriverCount() cpdef get_gdal_config(key, normalize=True): """Get the value of a GDAL configuration option. When requesting ``GDAL_CACHEMAX`` the value is returned unaltered. Parameters ---------- key : str Name of config option. normalize : bool, optional Convert values of ``"ON"'`` and ``"OFF"`` to ``True`` and ``False``. 
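    Example (illustrative; the result depends on the current environment)::

        >>> get_gdal_config("CPL_DEBUG")  # doctest: +SKIP
        True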
""" key = key.encode('utf-8') # GDAL_CACHEMAX is a special case if key.lower() == b'gdal_cachemax': if is_64bit: return GDALGetCacheMax64() else: return GDALGetCacheMax() else: val = CPLGetConfigOption(key, NULL) if not val: return None elif not normalize: return val elif val.isdigit(): return int(val) else: if val == u'ON': return True elif val == u'OFF': return False else: return val cpdef set_gdal_config(key, val, normalize=True): """Set a GDAL configuration option's value. Parameters ---------- key : str Name of config option. normalize : bool, optional Convert ``True`` to `"ON"` and ``False`` to `"OFF"``. """ key = key.encode('utf-8') # GDAL_CACHEMAX is a special case if key.lower() == b'gdal_cachemax': if is_64bit: GDALSetCacheMax64(val) else: GDALSetCacheMax(val) return elif normalize and isinstance(val, bool): val = ('ON' if val and val else 'OFF').encode('utf-8') else: # Value could be an int val = str(val).encode('utf-8') if isinstance(threading.current_thread(), threading._MainThread): CPLSetConfigOption(key, val) else: CPLSetThreadLocalConfigOption(key, val) cpdef del_gdal_config(key): """Delete a GDAL configuration option. Parameters ---------- key : str Name of config option. """ key = key.encode('utf-8') if isinstance(threading.current_thread(), threading._MainThread): CPLSetConfigOption(key, NULL) else: CPLSetThreadLocalConfigOption(key, NULL) cdef class ConfigEnv(object): """Configuration option management""" def __init__(self, **options): self.options = options.copy() self.update_config_options(**self.options) def update_config_options(self, **kwargs): """Update GDAL config options.""" for key, val in kwargs.items(): set_gdal_config(key, val) self.options[key] = val def clear_config_options(self): """Clear GDAL config options.""" while self.options: key, val = self.options.popitem() del_gdal_config(key) def get_config_options(self): return {k: get_gdal_config(k) for k in self.options} class GDALDataFinder(object): """Finds GDAL data files Note: this is not part of the 1.8.x public API. """ def find_file(self, basename): """Returns path of a GDAL data file or None Parameters ---------- basename : str Basename of a data file such as "header.dxf" Returns ------- str (on success) or None (on failure) """ cdef const char *path_c = NULL basename_b = basename.encode('utf-8') path_c = CPLFindFile("gdal", basename_b) if path_c == NULL: return None else: path = path_c return path def search(self, prefix=None): """Returns GDAL data directory Note well that os.environ is not consulted. 
Returns ------- str or None """ path = self.search_wheel(prefix or __file__) if not path: path = self.search_prefix(prefix or sys.prefix) if not path: path = self.search_debian(prefix or sys.prefix) return path def search_wheel(self, prefix=None): """Check wheel location""" if prefix is None: prefix = __file__ datadir = os.path.abspath(os.path.join(os.path.dirname(prefix), "gdal_data")) return datadir if os.path.exists(os.path.join(datadir, 'header.dxf')) else None def search_prefix(self, prefix=sys.prefix): """Check sys.prefix location""" datadir = os.path.join(prefix, 'share', 'gdal') return datadir if os.path.exists(os.path.join(datadir, 'header.dxf')) else None def search_debian(self, prefix=sys.prefix): """Check Debian locations""" gdal_release_name = GDALVersionInfo("RELEASE_NAME") datadir = os.path.join(prefix, 'share', 'gdal', '{}.{}'.format(*gdal_release_name.split('.')[:2])) return datadir if os.path.exists(os.path.join(datadir, 'header.dxf')) else None class PROJDataFinder(object): """Finds PROJ data files Note: this is not part of the public 1.8.x API. """ def has_data(self): """Returns True if PROJ's data files can be found Returns ------- bool """ cdef OGRSpatialReferenceH osr = OSRNewSpatialReference(NULL) try: exc_wrap_ogrerr(exc_wrap_int(OSRImportFromEPSG(osr, 4326))) except CPLE_BaseError: return False else: return True finally: _safe_osr_release(osr) def search(self, prefix=None): """Returns PROJ data directory Note well that os.environ is not consulted. Returns ------- str or None """ path = self.search_wheel(prefix or __file__) if not path: path = self.search_prefix(prefix or sys.prefix) return path def search_wheel(self, prefix=None): """Check wheel location""" if prefix is None: prefix = __file__ datadir = os.path.abspath(os.path.join(os.path.dirname(prefix), "proj_data")) return datadir if os.path.exists(datadir) else None def search_prefix(self, prefix=sys.prefix): """Check sys.prefix location""" datadir = os.path.join(prefix, 'share', 'proj') return datadir if os.path.exists(datadir) else None cdef class GDALEnv(ConfigEnv): """Configuration and driver management""" def __init__(self, **options): super(GDALEnv, self).__init__(**options) self._have_registered_drivers = False def start(self): CPLPushErrorHandler(logging_error_handler) # The outer if statement prevents each thread from acquiring a # lock when the environment starts, and the inner avoids a # potential race condition. if not self._have_registered_drivers: with threading.Lock(): if not self._have_registered_drivers: GDALAllRegister() OGRRegisterAll() if 'GDAL_DATA' in os.environ: log.debug("GDAL_DATA found in environment.") self.update_config_options(GDAL_DATA=os.environ['GDAL_DATA']) else: path = GDALDataFinder().search_wheel() if path: log.debug("GDAL data found in package: path=%r.", path) self.update_config_options(GDAL_DATA=path) # See https://github.com/mapbox/rasterio/issues/1631. 
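# Search order, summarized: GDAL_DATA in the environment wins, then a
# wheel's bundled gdal_data directory, then GDAL's built-in paths, and
# finally the prefix locations tried by GDALDataFinder().search().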
elif GDALDataFinder().find_file("header.dxf"): log.debug("GDAL data files are available at built-in paths.") else: path = GDALDataFinder().search() if path: log.debug("GDAL data found in other locations: path=%r.", path) self.update_config_options(GDAL_DATA=path) if 'PROJ_LIB' in os.environ: log.debug("PROJ_LIB found in environment.") path = os.environ["PROJ_LIB"] set_proj_data_search_path(path) else: path = PROJDataFinder().search_wheel() if path: log.debug("PROJ data found in package: path=%r.", path) set_proj_data_search_path(path) elif PROJDataFinder().has_data(): log.debug("PROJ data files are available at built-in paths.") else: path = PROJDataFinder().search() if path: log.debug("PROJ data found in other locations: path=%r.", path) set_proj_data_search_path(path) if driver_count() == 0: CPLPopErrorHandler() raise ValueError("Drivers not registered.") # Flag the drivers as registered, otherwise every thread # will acquire a threadlock every time a new environment # is started rather than just whenever the first thread # actually makes it this far. self._have_registered_drivers = True log.debug("Started GDALEnv: self=%r.", self) def stop(self): # NB: do not restore the CPL error handler to its default # state here. If you do, log messages will be written to stderr # by GDAL instead of being sent to Python's logging module. log.debug("Stopping GDALEnv %r.", self) CPLPopErrorHandler() log.debug("Error handler popped.") log.debug("Stopped GDALEnv %r.", self) def drivers(self): cdef OGRSFDriverH driver = NULL cdef int i result = {} for i in range(OGRGetDriverCount()): drv = OGRGetDriver(i) key = OGR_Dr_GetName(drv) val = OGR_Dr_GetName(drv) result[key] = val return result def set_proj_data_search_path(path): """Set PROJ data search path""" set_proj_search_path(path) Fiona-1.8.21/fiona/_err.pxd000066400000000000000000000005701420023252700153760ustar00rootroot00000000000000from libc.stdio cimport * cdef extern from "cpl_vsi.h": ctypedef FILE VSILFILE cdef extern from "ogr_core.h": ctypedef int OGRErr cdef get_last_error_msg() cdef int exc_wrap_int(int retval) except -1 cdef OGRErr exc_wrap_ogrerr(OGRErr retval) except -1 cdef void *exc_wrap_pointer(void *ptr) except NULL cdef VSILFILE *exc_wrap_vsilfile(VSILFILE *f) except NULL Fiona-1.8.21/fiona/_err.pyx000066400000000000000000000166101420023252700154250ustar00rootroot00000000000000"""fiona._err Transformation of GDAL C API errors to Python exceptions using Python's ``with`` statement and an error-handling context manager class. The ``cpl_errs`` error-handling context manager is intended for use in Rasterio's Cython code. When entering the body of a ``with`` statement, the context manager clears GDAL's error stack. On exit, the context manager pops the last error off the stack and raises an appropriate Python exception. It's otherwise pretty difficult to do this kind of thing. I couldn't make it work with a CPL error handler, Cython's C code swallows exceptions raised from C callbacks. 
When used to wrap a call to open a PNG in update mode with cpl_errs: cdef void *hds = GDALOpen('file.png', 1) if hds == NULL: raise ValueError("NULL dataset") the ValueError of last resort never gets raised because the context manager raises a more useful and informative error: Traceback (most recent call last): File "/Users/sean/code/rasterio/scripts/rio_insp", line 65, in with rasterio.open(args.src, args.mode) as src: File "/Users/sean/code/rasterio/rasterio/__init__.py", line 111, in open s.start() ValueError: The PNG driver does not support update access to existing datasets. """ from __future__ import absolute_import # CPL function declarations. cdef extern from "cpl_error.h": ctypedef enum CPLErr: CE_None CE_Debug CE_Warning CE_Failure CE_Fatal int CPLGetLastErrorNo() const char* CPLGetLastErrorMsg() int CPLGetLastErrorType() void CPLErrorReset() from enum import IntEnum # Python exceptions expressing the CPL error numbers. class CPLE_BaseError(Exception): """Base CPL error class Exceptions deriving from this class are intended for use only in Rasterio's Cython code. Let's not expose API users to them. """ def __init__(self, error, errno, errmsg): self.error = error self.errno = errno self.errmsg = errmsg def __str__(self): return self.__unicode__() def __unicode__(self): return u"{}".format(self.errmsg) @property def args(self): return self.error, self.errno, self.errmsg class CPLE_AppDefinedError(CPLE_BaseError): pass class CPLE_OutOfMemoryError(CPLE_BaseError): pass class CPLE_FileIOError(CPLE_BaseError): pass class CPLE_OpenFailedError(CPLE_BaseError): pass class CPLE_IllegalArgError(CPLE_BaseError): pass class CPLE_NotSupportedError(CPLE_BaseError): pass class CPLE_AssertionFailedError(CPLE_BaseError): pass class CPLE_NoWriteAccessError(CPLE_BaseError): pass class CPLE_UserInterruptError(CPLE_BaseError): pass class ObjectNullError(CPLE_BaseError): pass class CPLE_HttpResponseError(CPLE_BaseError): pass class CPLE_AWSBucketNotFoundError(CPLE_BaseError): pass class CPLE_AWSObjectNotFoundError(CPLE_BaseError): pass class CPLE_AWSAccessDeniedError(CPLE_BaseError): pass class CPLE_AWSInvalidCredentialsError(CPLE_BaseError): pass class CPLE_AWSSignatureDoesNotMatchError(CPLE_BaseError): pass class FionaNullPointerError(CPLE_BaseError): """ Returned from exc_wrap_pointer when a NULL pointer is passed, but no GDAL error was raised. """ pass class FionaCPLError(CPLE_BaseError): """ Returned from exc_wrap_int when a error code is returned, but no GDAL error was set. """ pass # Map of GDAL error numbers to the Python exceptions. exception_map = { 1: CPLE_AppDefinedError, 2: CPLE_OutOfMemoryError, 3: CPLE_FileIOError, 4: CPLE_OpenFailedError, 5: CPLE_IllegalArgError, 6: CPLE_NotSupportedError, 7: CPLE_AssertionFailedError, 8: CPLE_NoWriteAccessError, 9: CPLE_UserInterruptError, 10: ObjectNullError, # error numbers 11-16 are introduced in GDAL 2.1. See # https://github.com/OSGeo/gdal/pull/98. 11: CPLE_HttpResponseError, 12: CPLE_AWSBucketNotFoundError, 13: CPLE_AWSObjectNotFoundError, 14: CPLE_AWSAccessDeniedError, 15: CPLE_AWSInvalidCredentialsError, 16: CPLE_AWSSignatureDoesNotMatchError} # CPL Error types as an enum. 
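# (For reference: GDALError.failure corresponds to CE_Failure == 3 and
# GDALError.fatal to CE_Fatal == 4, the two levels that exc_check() below
# turns into a Python exception or a SystemExit, respectively.)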
class GDALError(IntEnum): none = CE_None debug = CE_Debug warning = CE_Warning failure = CE_Failure fatal = CE_Fatal cdef class GDALErrCtxManager: """A manager for GDAL error handling contexts.""" def __enter__(self): CPLErrorReset() return self def __exit__(self, exc_type=None, exc_val=None, exc_tb=None): cdef int err_type = CPLGetLastErrorType() cdef int err_no = CPLGetLastErrorNo() cdef const char *msg = CPLGetLastErrorMsg() # TODO: warn for err_type 2? if err_type >= 2: raise exception_map[err_no](err_type, err_no, msg) cdef inline object exc_check(): """Checks GDAL error stack for fatal or non-fatal errors Returns ------- An Exception, SystemExit, or None """ cdef const char *msg_c = NULL err_type = CPLGetLastErrorType() err_no = CPLGetLastErrorNo() err_msg = CPLGetLastErrorMsg() if err_msg == NULL: msg = "No error message." else: # Reformat messages. msg_b = err_msg msg = msg_b.decode('utf-8') msg = msg.replace("`", "'") msg = msg.replace("\n", " ") if err_type == 3: CPLErrorReset() return exception_map.get( err_no, CPLE_BaseError)(err_type, err_no, msg) if err_type == 4: return SystemExit("Fatal error: {0}".format((err_type, err_no, msg))) else: return cdef get_last_error_msg(): """Checks GDAL error stack for the latest error message Returns ------- An error message or empty string """ err_msg = CPLGetLastErrorMsg() if err_msg != NULL: # Reformat messages. msg_b = err_msg msg = msg_b.decode('utf-8') msg = msg.replace("`", "'") msg = msg.replace("\n", " ") else: msg = "" return msg cdef int exc_wrap_int(int err) except -1: """Wrap a GDAL/OGR function that returns CPLErr or OGRErr (int) Raises a Fiona exception if a non-fatal error has been set. """ if err: exc = exc_check() if exc: raise exc else: raise FionaCPLError(-1, -1, "The wrapped function returned an error code, but no error message was set.") return err cdef OGRErr exc_wrap_ogrerr(OGRErr err) except -1: """Wrap a function that returns OGRErr but does not use the CPL error stack. """ if err == 0: return err else: raise CPLE_BaseError(3, err, "OGR Error code {}".format(err)) cdef void *exc_wrap_pointer(void *ptr) except NULL: """Wrap a GDAL/OGR function that returns GDALDatasetH etc (void *) Raises a Fiona exception if a non-fatal error has been set. """ if ptr == NULL: exc = exc_check() if exc: raise exc else: # null pointer was passed, but no error message from GDAL raise FionaNullPointerError(-1, -1, "NULL pointer error") return ptr cdef VSILFILE *exc_wrap_vsilfile(VSILFILE *f) except NULL: """Wrap a GDAL/OGR function that returns a VSILFILE pointer Raises a Fiona exception if a non-fatal error has been set. """ if f == NULL: exc = exc_check() if exc: raise exc return f cpl_errs = GDALErrCtxManager() Fiona-1.8.21/fiona/_geometry.pxd000066400000000000000000000106701420023252700164430ustar00rootroot00000000000000# Geometry API functions.
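# A rough sketch of how these bindings are driven by the builders declared
# below (illustrative only):
#
#     cdef void *geom = OGR_G_CreateGeometry(wkbPoint)
#     OGR_G_AddPoint_2D(geom, 0.0, 0.0)
#     ...   # coordinates are read back with OGR_G_GetX/OGR_G_GetY
#     OGR_G_DestroyGeometry(geom)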
ctypedef int OGRErr cdef extern from "ogr_core.h": ctypedef enum OGRwkbGeometryType: wkbUnknown wkbPoint wkbLineString wkbPolygon wkbMultiPoint wkbMultiLineString wkbMultiPolygon wkbGeometryCollection wkbCircularString wkbCompoundCurve wkbCurvePolygon wkbMultiCurve wkbMultiSurface wkbCurve wkbSurface wkbPolyhedralSurface wkbTIN wkbTriangle wkbNone wkbLinearRing wkbCircularStringZ wkbCompoundCurveZ wkbCurvePolygonZ wkbMultiCurveZ wkbMultiSurfaceZ wkbCurveZ wkbSurfaceZ wkbPolyhedralSurfaceZ wkbTINZ wkbTriangleZ wkbPointM wkbLineStringM wkbPolygonM wkbMultiPointM wkbMultiLineStringM wkbMultiPolygonM wkbGeometryCollectionM wkbCircularStringM wkbCompoundCurveM wkbCurvePolygonM wkbMultiCurveM wkbMultiSurfaceM wkbCurveM wkbSurfaceM wkbPolyhedralSurfaceM wkbTINM wkbTriangleM wkbPointZM wkbLineStringZM wkbPolygonZM wkbMultiPointZM wkbMultiLineStringZM wkbMultiPolygonZM wkbGeometryCollectionZM wkbCircularStringZM wkbCompoundCurveZM wkbCurvePolygonZM wkbMultiCurveZM wkbMultiSurfaceZM wkbCurveZM wkbSurfaceZM wkbPolyhedralSurfaceZM wkbTINZM wkbTriangleZM wkbPoint25D wkbLineString25D wkbPolygon25D wkbMultiPoint25D wkbMultiLineString25D wkbMultiPolygon25D wkbGeometryCollection25D ctypedef struct OGREnvelope: double MinX double MaxX double MinY double MaxY cdef extern from "ogr_api.h": OGRErr OGR_G_AddGeometryDirectly (void *geometry, void *part) void OGR_G_AddPoint (void *geometry, double x, double y, double z) void OGR_G_AddPoint_2D (void *geometry, double x, double y) void OGR_G_CloseRings (void *geometry) void * OGR_G_CreateGeometry (OGRwkbGeometryType wkbtypecode) void OGR_G_DestroyGeometry (void *geometry) unsigned char * OGR_G_ExportToJson (void *geometry) void OGR_G_ExportToWkb (void *geometry, int endianness, char *buffer) int OGR_G_GetCoordinateDimension (void *geometry) int OGR_G_GetGeometryCount (void *geometry) unsigned char * OGR_G_GetGeometryName (void *geometry) int OGR_G_GetGeometryType (void *geometry) void * OGR_G_GetGeometryRef (void *geometry, int n) int OGR_G_GetPointCount (void *geometry) double OGR_G_GetX (void *geometry, int n) double OGR_G_GetY (void *geometry, int n) double OGR_G_GetZ (void *geometry, int n) OGRErr OGR_G_ImportFromWkb (void *geometry, unsigned char *bytes, int nbytes) int OGR_G_WkbSize (void *geometry) cdef class GeomBuilder: cdef void *geom cdef object code cdef object geomtypename cdef object ndims cdef _buildCoords(self, void *geom) cpdef _buildPoint(self) cpdef _buildLineString(self) cpdef _buildLinearRing(self) cdef _buildParts(self, void *geom) cpdef _buildPolygon(self) cpdef _buildMultiPoint(self) cpdef _buildMultiLineString(self) cpdef _buildMultiPolygon(self) cpdef _buildGeometryCollection(self) cdef build(self, void *geom) cpdef build_wkb(self, object wkb) cdef class OGRGeomBuilder: cdef void * _createOgrGeometry(self, int geom_type) except NULL cdef _addPointToGeometry(self, void *cogr_geometry, object coordinate) cdef void * _buildPoint(self, object coordinates) except NULL cdef void * _buildLineString(self, object coordinates) except NULL cdef void * _buildLinearRing(self, object coordinates) except NULL cdef void * _buildPolygon(self, object coordinates) except NULL cdef void * _buildMultiPoint(self, object coordinates) except NULL cdef void * _buildMultiLineString(self, object coordinates) except NULL cdef void * _buildMultiPolygon(self, object coordinates) except NULL cdef void * _buildGeometryCollection(self, object coordinates) except NULL cdef void * build(self, object geom) except NULL cdef unsigned int geometry_type_code(object name) except? 
9999 cdef object normalize_geometry_type_code(unsigned int code) cdef unsigned int base_geometry_type_code(unsigned int code) Fiona-1.8.21/fiona/_geometry.pyx000066400000000000000000000256341420023252700164740ustar00rootroot00000000000000# Coordinate and geometry transformations. from __future__ import absolute_import import logging from fiona.errors import UnsupportedGeometryTypeError from fiona._err cimport exc_wrap_int class NullHandler(logging.Handler): def emit(self, record): pass log = logging.getLogger(__name__) log.addHandler(NullHandler()) # Mapping of OGR integer geometry types to GeoJSON type names. GEOMETRY_TYPES = { 0: 'Unknown', 1: 'Point', 2: 'LineString', 3: 'Polygon', 4: 'MultiPoint', 5: 'MultiLineString', 6: 'MultiPolygon', 7: 'GeometryCollection', # Unsupported types. #8: 'CircularString', #9: 'CompoundCurve', #10: 'CurvePolygon', #11: 'MultiCurve', #12: 'MultiSurface', #13: 'Curve', #14: 'Surface', #15: 'PolyhedralSurface', #16: 'TIN', #17: 'Triangle', 100: 'None', 101: 'LinearRing', 0x80000001: '3D Point', 0x80000002: '3D LineString', 0x80000003: '3D Polygon', 0x80000004: '3D MultiPoint', 0x80000005: '3D MultiLineString', 0x80000006: '3D MultiPolygon', 0x80000007: '3D GeometryCollection' } # mapping of GeoJSON type names to OGR integer geometry types GEOJSON2OGR_GEOMETRY_TYPES = dict((v, k) for k, v in GEOMETRY_TYPES.items()) cdef unsigned int geometry_type_code(name) except? 9999: """Map OGC geometry type names to integer codes.""" offset = 0 if name.endswith('ZM'): offset = 3000 elif name.endswith('M'): offset = 2000 elif name.endswith('Z'): offset = 1000 normalized_name = name.rstrip('ZM') if normalized_name not in GEOJSON2OGR_GEOMETRY_TYPES: raise UnsupportedGeometryTypeError(name) return offset + GEOJSON2OGR_GEOMETRY_TYPES[normalized_name] cdef object normalize_geometry_type_code(unsigned int code): """Normalize M geometry type codes.""" # Normalize 'M' types to 2D types. if 2000 <= code < 3000: code = code % 1000 elif code == 3000: code = 0 # Normalize 'ZM' types to 3D types. elif 3000 < code < 4000: code = (code % 1000) | 0x80000000 if code not in GEOMETRY_TYPES: raise UnsupportedGeometryTypeError(code) return code cdef inline unsigned int base_geometry_type_code(unsigned int code): """ Returns base geometry code without Z, M and ZM types """ # Remove 2.5D flag. code = code & (~0x80000000) # Normalize Z, M, and ZM types. Fiona 1.x does not support M # and doesn't treat OGC 'Z' variants as special types of their # own. return code % 1000 # Geometry related functions and classes follow. cdef void * _createOgrGeomFromWKB(object wkb) except NULL: """Make an OGR geometry from a WKB string""" wkbtype = bytearray(wkb)[1] cdef unsigned char *buffer = wkb cdef void *cogr_geometry = OGR_G_CreateGeometry(<OGRwkbGeometryType>wkbtype) if cogr_geometry is not NULL: exc_wrap_int(OGR_G_ImportFromWkb(cogr_geometry, buffer, len(wkb))) return cogr_geometry cdef _deleteOgrGeom(void *cogr_geometry): """Delete an OGR geometry""" if cogr_geometry is not NULL: OGR_G_DestroyGeometry(cogr_geometry) cogr_geometry = NULL cdef class GeomBuilder: """Builds Fiona (GeoJSON) geometries from an OGR geometry handle.
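    Example (illustrative; ``wkb`` is assumed to be the WKB encoding of a
    2D point)::

        >>> GeomBuilder().build_wkb(wkb)  # doctest: +SKIP
        {'type': 'Point', 'coordinates': (0.0, 0.0)}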
""" cdef _buildCoords(self, void *geom): # Build a coordinate sequence cdef int i if geom == NULL: raise ValueError("Null geom") npoints = OGR_G_GetPointCount(geom) coords = [] for i in range(npoints): values = [OGR_G_GetX(geom, i), OGR_G_GetY(geom, i)] if self.ndims > 2: values.append(OGR_G_GetZ(geom, i)) coords.append(tuple(values)) return coords cpdef _buildPoint(self): return {'type': 'Point', 'coordinates': self._buildCoords(self.geom)[0]} cpdef _buildLineString(self): return {'type': 'LineString', 'coordinates': self._buildCoords(self.geom)} cpdef _buildLinearRing(self): return {'type': 'LinearRing', 'coordinates': self._buildCoords(self.geom)} cdef _buildParts(self, void *geom): cdef int j cdef void *part if geom == NULL: raise ValueError("Null geom") parts = [] for j in range(OGR_G_GetGeometryCount(geom)): part = OGR_G_GetGeometryRef(geom, j) parts.append(GeomBuilder().build(part)) return parts cpdef _buildPolygon(self): coordinates = [p['coordinates'] for p in self._buildParts(self.geom)] return {'type': 'Polygon', 'coordinates': coordinates} cpdef _buildMultiPoint(self): coordinates = [p['coordinates'] for p in self._buildParts(self.geom)] return {'type': 'MultiPoint', 'coordinates': coordinates} cpdef _buildMultiLineString(self): coordinates = [p['coordinates'] for p in self._buildParts(self.geom)] return {'type': 'MultiLineString', 'coordinates': coordinates} cpdef _buildMultiPolygon(self): coordinates = [p['coordinates'] for p in self._buildParts(self.geom)] return {'type': 'MultiPolygon', 'coordinates': coordinates} cpdef _buildGeometryCollection(self): parts = self._buildParts(self.geom) return {'type': 'GeometryCollection', 'geometries': parts} cdef build(self, void *geom): # The only method anyone needs to call if geom == NULL: raise ValueError("Null geom") cdef unsigned int etype = OGR_G_GetGeometryType(geom) self.code = base_geometry_type_code(etype) if self.code not in GEOMETRY_TYPES: raise UnsupportedGeometryTypeError(self.code) self.geomtypename = GEOMETRY_TYPES[self.code] self.ndims = OGR_G_GetCoordinateDimension(geom) self.geom = geom return getattr(self, '_build' + self.geomtypename)() cpdef build_wkb(self, object wkb): # The only other method anyone needs to call cdef object data = wkb cdef void *cogr_geometry = _createOgrGeomFromWKB(data) result = self.build(cogr_geometry) _deleteOgrGeom(cogr_geometry) return result cdef class OGRGeomBuilder: """Builds OGR geometries from Fiona geometries. 
""" cdef void * _createOgrGeometry(self, int geom_type) except NULL: cdef void *cogr_geometry = OGR_G_CreateGeometry(geom_type) if cogr_geometry == NULL: raise Exception("Could not create OGR Geometry of type: %i" % geom_type) return cogr_geometry cdef _addPointToGeometry(self, void *cogr_geometry, object coordinate): if len(coordinate) == 2: x, y = coordinate OGR_G_AddPoint_2D(cogr_geometry, x, y) else: x, y, z = coordinate[:3] OGR_G_AddPoint(cogr_geometry, x, y, z) cdef void * _buildPoint(self, object coordinates) except NULL: cdef void *cogr_geometry = self._createOgrGeometry(GEOJSON2OGR_GEOMETRY_TYPES['Point']) self._addPointToGeometry(cogr_geometry, coordinates) return cogr_geometry cdef void * _buildLineString(self, object coordinates) except NULL: cdef void *cogr_geometry = self._createOgrGeometry(GEOJSON2OGR_GEOMETRY_TYPES['LineString']) for coordinate in coordinates: self._addPointToGeometry(cogr_geometry, coordinate) return cogr_geometry cdef void * _buildLinearRing(self, object coordinates) except NULL: cdef void *cogr_geometry = self._createOgrGeometry(GEOJSON2OGR_GEOMETRY_TYPES['LinearRing']) for coordinate in coordinates: self._addPointToGeometry(cogr_geometry, coordinate) OGR_G_CloseRings(cogr_geometry) return cogr_geometry cdef void * _buildPolygon(self, object coordinates) except NULL: cdef void *cogr_ring cdef void *cogr_geometry = self._createOgrGeometry(GEOJSON2OGR_GEOMETRY_TYPES['Polygon']) for ring in coordinates: cogr_ring = self._buildLinearRing(ring) exc_wrap_int(OGR_G_AddGeometryDirectly(cogr_geometry, cogr_ring)) return cogr_geometry cdef void * _buildMultiPoint(self, object coordinates) except NULL: cdef void *cogr_part cdef void *cogr_geometry = self._createOgrGeometry(GEOJSON2OGR_GEOMETRY_TYPES['MultiPoint']) for coordinate in coordinates: cogr_part = self._buildPoint(coordinate) exc_wrap_int(OGR_G_AddGeometryDirectly(cogr_geometry, cogr_part)) return cogr_geometry cdef void * _buildMultiLineString(self, object coordinates) except NULL: cdef void *cogr_part cdef void *cogr_geometry = self._createOgrGeometry(GEOJSON2OGR_GEOMETRY_TYPES['MultiLineString']) for line in coordinates: cogr_part = self._buildLineString(line) exc_wrap_int(OGR_G_AddGeometryDirectly(cogr_geometry, cogr_part)) return cogr_geometry cdef void * _buildMultiPolygon(self, object coordinates) except NULL: cdef void *cogr_part cdef void *cogr_geometry = self._createOgrGeometry(GEOJSON2OGR_GEOMETRY_TYPES['MultiPolygon']) for part in coordinates: cogr_part = self._buildPolygon(part) exc_wrap_int(OGR_G_AddGeometryDirectly(cogr_geometry, cogr_part)) return cogr_geometry cdef void * _buildGeometryCollection(self, object coordinates) except NULL: cdef void *cogr_part cdef void *cogr_geometry = self._createOgrGeometry(GEOJSON2OGR_GEOMETRY_TYPES['GeometryCollection']) for part in coordinates: cogr_part = OGRGeomBuilder().build(part) exc_wrap_int(OGR_G_AddGeometryDirectly(cogr_geometry, cogr_part)) return cogr_geometry cdef void * build(self, object geometry) except NULL: cdef object typename = geometry['type'] cdef object coordinates = geometry.get('coordinates') if typename == 'Point': return self._buildPoint(coordinates) elif typename == 'LineString': return self._buildLineString(coordinates) elif typename == 'LinearRing': return self._buildLinearRing(coordinates) elif typename == 'Polygon': return self._buildPolygon(coordinates) elif typename == 'MultiPoint': return self._buildMultiPoint(coordinates) elif typename == 'MultiLineString': return self._buildMultiLineString(coordinates) elif typename == 
'MultiPolygon': return self._buildMultiPolygon(coordinates) elif typename == 'GeometryCollection': coordinates = geometry.get('geometries') return self._buildGeometryCollection(coordinates) else: raise ValueError("Unsupported geometry type %s" % typename) def geometryRT(geometry): # For testing purposes only, leaks the JSON data cdef void *cogr_geometry = OGRGeomBuilder().build(geometry) result = GeomBuilder().build(cogr_geometry) _deleteOgrGeom(cogr_geometry) return result Fiona-1.8.21/fiona/_loading.py000066400000000000000000000014411420023252700160560ustar00rootroot00000000000000import glob import os import logging import contextlib import platform import sys log = logging.getLogger(__name__) log.addHandler(logging.NullHandler()) # With Python >= 3.8 on Windows directories in PATH are not automatically # searched for DLL dependencies and must be added manually with # os.add_dll_directory. # see https://github.com/Toblerity/Fiona/issues/851 @contextlib.contextmanager def add_gdal_dll_directories(): dll_dirs = [] if platform.system() == 'Windows' and sys.version_info >= (3, 8): dll_directory = os.path.join(os.path.dirname(__file__), '.libs') if os.path.exists(dll_directory): dll_dirs.append(os.add_dll_directory(dll_directory)) try: yield None finally: for dll_dir in dll_dirs: dll_dir.close() Fiona-1.8.21/fiona/_shim1.pxd000066400000000000000000000041511420023252700156260ustar00rootroot00000000000000include "ogrext1.pxd" ctypedef enum OGRFieldSubType: OFSTNone = 0 OFSTBoolean = 1 OFSTInt16 = 2 OFSTFloat32 = 3 OFSTMaxSubType = 3 cdef bint is_field_null(void *feature, int n) cdef void set_field_null(void *feature, int n) cdef void gdal_flush_cache(void *cogr_ds) cdef void* gdal_open_vector(const char* path_c, int mode, drivers, options) except NULL cdef void* gdal_create(void* cogr_driver, const char *path_c, options) except NULL cdef bint check_capability_transaction(void *cogr_ds) cdef OGRErr gdal_start_transaction(void *cogr_ds, int force) cdef OGRErr gdal_commit_transaction(void *cogr_ds) cdef OGRErr gdal_rollback_transaction(void *cogr_ds) cdef OGRFieldSubType get_field_subtype(void *fielddefn) cdef void set_field_subtype(void *fielddefn, OGRFieldSubType subtype) cdef bint check_capability_create_layer(void *cogr_ds) cdef void *get_linear_geometry(void *geom) cdef const char* osr_get_name(OGRSpatialReferenceH hSrs) cdef void osr_set_traditional_axis_mapping_strategy(OGRSpatialReferenceH hSrs) cdef void set_proj_search_path(object path) cdef void get_proj_version(int *, int *, int *) cdef void set_field_datetime(void *cogr_feature, int iField, int nYear, int nMonth, int nDay, int nHour, int nMinute, float fSecond, int nTZFlag) cdef int get_field_as_datetime(void *cogr_feature, int iField, int *, int *, int *, int *, int *, float *, int *) from fiona._shim cimport OGR_F_GetFieldAsInteger as OGR_F_GetFieldAsInteger64 from fiona._shim cimport OGR_F_SetFieldInteger as OGR_F_SetFieldInteger64 from fiona._shim cimport OGR_DS_GetLayerByName as GDALDatasetGetLayerByName from fiona._shim cimport OGR_DS_GetLayer as GDALDatasetGetLayer from fiona._shim cimport OGR_DS_Destroy as GDALClose from fiona._shim cimport OGR_DS_GetDriver as GDALGetDatasetDriver from fiona._shim cimport OGRGetDriverByName as GDALGetDriverByName from fiona._shim cimport OGR_DS_GetLayerCount as GDALDatasetGetLayerCount from fiona._shim cimport OGR_DS_DeleteLayer as GDALDatasetDeleteLayer from fiona._shim cimport OGR_DS_CreateLayer as GDALDatasetCreateLayer from fiona._shim cimport OGR_Dr_DeleteDataSource as GDALDeleteDataset 
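# The aliases above let ogrext code written against the GDAL 2.x API names
# (GDALDatasetGetLayerByName, GDALClose, and so on) compile unchanged
# against GDAL 1.x, where the equivalent entry points are OGR_DS_* functions.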
Fiona-1.8.21/fiona/_shim1.pyx000066400000000000000000000106661420023252700156630ustar00rootroot00000000000000"""Shims on top of ogrext for GDAL versions < 2""" import os from fiona.ogrext1 cimport * from fiona._err cimport exc_wrap_pointer, exc_wrap_int from fiona._err import cpl_errs, CPLE_BaseError, FionaNullPointerError from fiona.errors import DriverError cdef int OGRERR_NONE = 0 cdef bint is_field_null(void *feature, int n): if not OGR_F_IsFieldSet(feature, n): return True else: return False cdef void set_field_null(void *feature, int n): pass cdef void gdal_flush_cache(void *cogr_ds): retval = exc_wrap_int(OGR_DS_SyncToDisk(cogr_ds)) if retval != OGRERR_NONE: raise RuntimeError("Failed to sync to disk") cdef void* gdal_open_vector(const char *path_c, int mode, drivers, options) except NULL: cdef void* cogr_ds = NULL cdef void* drv = NULL cdef void* ds = NULL encoding = options.get('encoding', None) if encoding: val = encoding.encode('utf-8') CPLSetThreadLocalConfigOption('SHAPE_ENCODING', val) else: CPLSetThreadLocalConfigOption('SHAPE_ENCODING', "") if drivers: for name in drivers: name_b = name.encode() name_c = name_b drv = OGRGetDriverByName(name_c) if drv != NULL: ds = OGR_Dr_Open(drv, path_c, mode) if ds != NULL: cogr_ds = ds break else: cogr_ds = OGROpen(path_c, mode, NULL) try: return exc_wrap_pointer(cogr_ds) except FionaNullPointerError: raise DriverError("Failed to open dataset (mode={}): {}".format(mode, path_c.decode("utf-8"))) except CPLE_BaseError as exc: raise DriverError(str(exc)) cdef void* gdal_create(void* cogr_driver, const char *path_c, options) except NULL: cdef void* cogr_ds = NULL cdef char **opts = NULL encoding = options.get('encoding', None) if encoding: val = encoding.encode('utf-8') CPLSetThreadLocalConfigOption('SHAPE_ENCODING', val) else: CPLSetThreadLocalConfigOption('SHAPE_ENCODING', "") for k, v in options.items(): k = k.upper().encode('utf-8') if isinstance(v, bool): v = ('ON' if v else 'OFF').encode('utf-8') else: v = str(v).encode('utf-8') opts = CSLAddNameValue(opts, k, v) try: return exc_wrap_pointer( OGR_Dr_CreateDataSource(cogr_driver, path_c, opts) ) except FionaNullPointerError: raise DriverError("Failed to create dataset: {}".format(path_c.decode("utf-8"))) except CPLE_BaseError as exc: raise DriverError(str(exc)) finally: CSLDestroy(opts) # transactions are not supported in GDAL 1.x cdef bint check_capability_transaction(void *cogr_ds): return False cdef OGRErr gdal_start_transaction(void* cogr_ds, int force): return OGRERR_NONE cdef OGRErr gdal_commit_transaction(void* cogr_ds): return OGRERR_NONE cdef OGRErr gdal_rollback_transaction(void* cogr_ds): return OGRERR_NONE # field subtypes are not supported in GDAL 1.x cdef OGRFieldSubType get_field_subtype(void *fielddefn): return OFSTNone cdef void set_field_subtype(void *fielddefn, OGRFieldSubType subtype): pass cdef bint check_capability_create_layer(void *cogr_ds): return OGR_DS_TestCapability(cogr_ds, ODsCCreateLayer) cdef void *get_linear_geometry(void *geom): return geom cdef const char* osr_get_name(OGRSpatialReferenceH hSrs): return '' cdef void osr_set_traditional_axis_mapping_strategy(OGRSpatialReferenceH hSrs): OSRFixup(hSrs) cdef void set_proj_search_path(object path): os.environ["PROJ_LIB"] = path cdef void get_proj_version(int* major, int* minor, int* patch): cdef int val = -1 major[0] = val minor[0] = val patch[0] = val cdef void set_field_datetime(void *cogr_feature, int iField, int nYear, int nMonth, int nDay, int nHour, int nMinute, float fSecond, int nTZFlag): cdef int 
nSecond nSecond = int(fSecond) OGR_F_SetFieldDateTime(cogr_feature, iField, nYear, nMonth, nDay, nHour, nMinute, nSecond, nTZFlag) cdef int get_field_as_datetime(void *cogr_feature, int iField, int* nYear, int* nMonth, int* nDay, int* nHour, int* nMinute, float* fSecond, int* nTZFlag): cdef int retval cdef int nSecond retval = OGR_F_GetFieldAsDateTime(cogr_feature, iField, nYear, nMonth, nDay, nHour, nMinute, &nSecond, nTZFlag) fSecond[0] = float(nSecond) return retval Fiona-1.8.21/fiona/_shim2.pxd000066400000000000000000000023541420023252700156320ustar00rootroot00000000000000include "ogrext2.pxd" cdef bint is_field_null(void *feature, int n) cdef void set_field_null(void *feature, int n) cdef void gdal_flush_cache(void *cogr_ds) cdef void* gdal_open_vector(const char* path_c, int mode, drivers, options) except NULL cdef void* gdal_create(void* cogr_driver, const char *path_c, options) except NULL cdef bint check_capability_transaction(void *cogr_ds) cdef OGRErr gdal_start_transaction(void *cogr_ds, int force) cdef OGRErr gdal_commit_transaction(void *cogr_ds) cdef OGRErr gdal_rollback_transaction(void *cogr_ds) cdef OGRFieldSubType get_field_subtype(void *fielddefn) cdef void set_field_subtype(void *fielddefn, OGRFieldSubType subtype) cdef bint check_capability_create_layer(void *cogr_ds) cdef void *get_linear_geometry(void *geom) cdef const char* osr_get_name(OGRSpatialReferenceH hSrs) cdef void osr_set_traditional_axis_mapping_strategy(OGRSpatialReferenceH hSrs) cdef void set_proj_search_path(object path) cdef void get_proj_version(int *, int *, int *) cdef void set_field_datetime(void *cogr_feature, int iField, int nYear, int nMonth, int nDay, int nHour, int nMinute, float fSecond, int nTZFlag) cdef int get_field_as_datetime(void *cogr_feature, int iField, int *, int *, int *, int *, int *, float *, int *) Fiona-1.8.21/fiona/_shim2.pyx000066400000000000000000000111041420023252700156500ustar00rootroot00000000000000"""Shims on top of ogrext for GDAL versions > 2""" import logging import os from fiona.ogrext2 cimport * from fiona._err cimport exc_wrap_pointer from fiona._err import cpl_errs, CPLE_BaseError, FionaNullPointerError from fiona.errors import DriverError log = logging.getLogger(__name__) cdef bint is_field_null(void *feature, int n): if not OGR_F_IsFieldSet(feature, n): return True else: return False cdef void set_field_null(void *feature, int n): pass cdef void gdal_flush_cache(void *cogr_ds): with cpl_errs: GDALFlushCache(cogr_ds) cdef void* gdal_open_vector(const char* path_c, int mode, drivers, options) except NULL: cdef void* cogr_ds = NULL cdef char **drvs = NULL cdef char **open_opts = NULL flags = GDAL_OF_VECTOR | GDAL_OF_VERBOSE_ERROR if mode == 1: flags |= GDAL_OF_UPDATE else: flags |= GDAL_OF_READONLY if drivers: for name in drivers: name_b = name.encode() name_c = name_b drv = GDALGetDriverByName(name_c) if drv != NULL: drvs = CSLAddString(drvs, name_c) for k, v in options.items(): if v is None: continue k = k.upper().encode('utf-8') if isinstance(v, bool): v = ('ON' if v else 'OFF').encode('utf-8') else: v = str(v).encode('utf-8') log.debug("Set option %r: %r", k, v) open_opts = CSLAddNameValue(open_opts, k, v) open_opts = CSLAddNameValue(open_opts, "VALIDATE_OPEN_OPTIONS", "NO") try: cogr_ds = exc_wrap_pointer(GDALOpenEx( path_c, flags, drvs, open_opts, NULL) ) return cogr_ds except FionaNullPointerError: raise DriverError("Failed to open dataset (mode={}): {}".format(mode, path_c.decode("utf-8"))) except CPLE_BaseError as exc: raise DriverError(str(exc)) finally: 
CSLDestroy(drvs) CSLDestroy(open_opts) cdef void* gdal_create(void* cogr_driver, const char *path_c, options) except NULL: cdef char **creation_opts = NULL for k, v in options.items(): k = k.upper().encode('utf-8') if isinstance(v, bool): v = ('ON' if v else 'OFF').encode('utf-8') else: v = str(v).encode('utf-8') log.debug("Set option %r: %r", k, v) creation_opts = CSLAddNameValue(creation_opts, k, v) try: return exc_wrap_pointer(GDALCreate(cogr_driver, path_c, 0, 0, 0, GDT_Unknown, creation_opts)) except FionaNullPointerError: raise DriverError("Failed to create dataset: {}".format(path_c.decode("utf-8"))) except CPLE_BaseError as exc: raise DriverError(str(exc)) finally: CSLDestroy(creation_opts) cdef bint check_capability_transaction(void *cogr_ds): return GDALDatasetTestCapability(cogr_ds, ODsCTransactions) cdef OGRErr gdal_start_transaction(void* cogr_ds, int force): return GDALDatasetStartTransaction(cogr_ds, force) cdef OGRErr gdal_commit_transaction(void* cogr_ds): return GDALDatasetCommitTransaction(cogr_ds) cdef OGRErr gdal_rollback_transaction(void* cogr_ds): return GDALDatasetRollbackTransaction(cogr_ds) cdef OGRFieldSubType get_field_subtype(void *fielddefn): return OGR_Fld_GetSubType(fielddefn) cdef void set_field_subtype(void *fielddefn, OGRFieldSubType subtype): OGR_Fld_SetSubType(fielddefn, subtype) cdef bint check_capability_create_layer(void *cogr_ds): return GDALDatasetTestCapability(cogr_ds, ODsCCreateLayer) cdef void *get_linear_geometry(void *geom): return OGR_G_GetLinearGeometry(geom, 0.0, NULL) cdef const char* osr_get_name(OGRSpatialReferenceH hSrs): return '' cdef void osr_set_traditional_axis_mapping_strategy(OGRSpatialReferenceH hSrs): OSRFixup(hSrs) cdef void set_proj_search_path(object path): os.environ["PROJ_LIB"] = path cdef void get_proj_version(int* major, int* minor, int* patch): cdef int val = -1 major[0] = val minor[0] = val patch[0] = val cdef void set_field_datetime(void *cogr_feature, int iField, int nYear, int nMonth, int nDay, int nHour, int nMinute, float fSecond, int nTZFlag): OGR_F_SetFieldDateTimeEx(cogr_feature, iField, nYear, nMonth, nDay, nHour, nMinute, fSecond, nTZFlag) cdef int get_field_as_datetime(void *cogr_feature, int iField, int* nYear, int* nMonth, int* nDay, int* nHour, int* nMinute, float* fSecond, int* nTZFlag): return OGR_F_GetFieldAsDateTimeEx(cogr_feature, iField, nYear, nMonth, nDay, nHour, nMinute, fSecond, nTZFlag) Fiona-1.8.21/fiona/_shim22.pxd000066400000000000000000000023541420023252700157140ustar00rootroot00000000000000include "ogrext2.pxd" cdef bint is_field_null(void *feature, int n) cdef void set_field_null(void *feature, int n) cdef void gdal_flush_cache(void *cogr_ds) cdef void* gdal_open_vector(const char *path_c, int mode, drivers, options) except NULL cdef void* gdal_create(void* cogr_driver, const char *path_c, options) except NULL cdef bint check_capability_transaction(void *cogr_ds) cdef OGRErr gdal_start_transaction(void *cogr_ds, int force) cdef OGRErr gdal_commit_transaction(void *cogr_ds) cdef OGRErr gdal_rollback_transaction(void *cogr_ds) cdef OGRFieldSubType get_field_subtype(void *fielddefn) cdef void set_field_subtype(void *fielddefn, OGRFieldSubType subtype) cdef bint check_capability_create_layer(void *cogr_ds) cdef void *get_linear_geometry(void *geom) cdef const char* osr_get_name(OGRSpatialReferenceH hSrs) cdef void osr_set_traditional_axis_mapping_strategy(OGRSpatialReferenceH hSrs) cdef void set_proj_search_path(object path) cdef void get_proj_version(int *, int *, int *) cdef void 
set_field_datetime(void *cogr_feature, int iField, int nYear, int nMonth, int nDay, int nHour, int nMinute, float fSecond, int nTZFlag) cdef int get_field_as_datetime(void *cogr_feature, int iField, int *, int *, int *, int *, int *, float *, int *) Fiona-1.8.21/fiona/_shim22.pyx000066400000000000000000000114411420023252700157360ustar00rootroot00000000000000"""Shims on top of ogrext for GDAL versions >= 2.2""" cdef extern from "ogr_api.h": int OGR_F_IsFieldNull(void *feature, int n) import logging import os from fiona.ogrext2 cimport * from fiona._err cimport exc_wrap_pointer from fiona._err import cpl_errs, CPLE_BaseError, FionaNullPointerError from fiona.errors import DriverError log = logging.getLogger(__name__) cdef bint is_field_null(void *feature, int n): if OGR_F_IsFieldNull(feature, n): return True elif not OGR_F_IsFieldSet(feature, n): return True else: return False cdef void set_field_null(void *feature, int n): OGR_F_SetFieldNull(feature, n) cdef void gdal_flush_cache(void *cogr_ds): with cpl_errs: GDALFlushCache(cogr_ds) cdef void* gdal_open_vector(char* path_c, int mode, drivers, options) except NULL: cdef void* cogr_ds = NULL cdef char **drvs = NULL cdef void* drv = NULL cdef char **open_opts = NULL flags = GDAL_OF_VECTOR | GDAL_OF_VERBOSE_ERROR if mode == 1: flags |= GDAL_OF_UPDATE else: flags |= GDAL_OF_READONLY if drivers: for name in drivers: name_b = name.encode() name_c = name_b drv = GDALGetDriverByName(name_c) if drv != NULL: drvs = CSLAddString(drvs, name_c) for k, v in options.items(): if v is None: continue k = k.upper().encode('utf-8') if isinstance(v, bool): v = ('ON' if v else 'OFF').encode('utf-8') else: v = str(v).encode('utf-8') log.debug("Set option %r: %r", k, v) open_opts = CSLAddNameValue(open_opts, k, v) open_opts = CSLAddNameValue(open_opts, "VALIDATE_OPEN_OPTIONS", "NO") try: cogr_ds = exc_wrap_pointer( GDALOpenEx(path_c, flags, drvs, open_opts, NULL) ) return cogr_ds except FionaNullPointerError: raise DriverError("Failed to open dataset (mode={}): {}".format(mode, path_c.decode("utf-8"))) except CPLE_BaseError as exc: raise DriverError(str(exc)) finally: CSLDestroy(drvs) CSLDestroy(open_opts) cdef void* gdal_create(void* cogr_driver, const char *path_c, options) except NULL: cdef char **creation_opts = NULL cdef void *cogr_ds = NULL for k, v in options.items(): k = k.upper().encode('utf-8') if isinstance(v, bool): v = ('ON' if v else 'OFF').encode('utf-8') else: v = str(v).encode('utf-8') log.debug("Set option %r: %r", k, v) creation_opts = CSLAddNameValue(creation_opts, k, v) try: return exc_wrap_pointer(GDALCreate(cogr_driver, path_c, 0, 0, 0, GDT_Unknown, creation_opts)) except FionaNullPointerError: raise DriverError("Failed to create dataset: {}".format(path_c.decode("utf-8"))) except CPLE_BaseError as exc: raise DriverError(str(exc)) finally: CSLDestroy(creation_opts) cdef bint check_capability_transaction(void *cogr_ds): return GDALDatasetTestCapability(cogr_ds, ODsCTransactions) cdef OGRErr gdal_start_transaction(void* cogr_ds, int force): return GDALDatasetStartTransaction(cogr_ds, force) cdef OGRErr gdal_commit_transaction(void* cogr_ds): return GDALDatasetCommitTransaction(cogr_ds) cdef OGRErr gdal_rollback_transaction(void* cogr_ds): return GDALDatasetRollbackTransaction(cogr_ds) cdef OGRFieldSubType get_field_subtype(void *fielddefn): return OGR_Fld_GetSubType(fielddefn) cdef void set_field_subtype(void *fielddefn, OGRFieldSubType subtype): OGR_Fld_SetSubType(fielddefn, subtype) cdef bint check_capability_create_layer(void *cogr_ds): 
return GDALDatasetTestCapability(cogr_ds, ODsCCreateLayer) cdef void *get_linear_geometry(void *geom): return OGR_G_GetLinearGeometry(geom, 0.0, NULL) cdef const char* osr_get_name(OGRSpatialReferenceH hSrs): return '' cdef void osr_set_traditional_axis_mapping_strategy(OGRSpatialReferenceH hSrs): OSRFixup(hSrs) cdef void set_proj_search_path(object path): os.environ["PROJ_LIB"] = path cdef void get_proj_version(int* major, int* minor, int* patch): cdef int val = -1 major[0] = val minor[0] = val patch[0] = val cdef void set_field_datetime(void *cogr_feature, int iField, int nYear, int nMonth, int nDay, int nHour, int nMinute, float fSecond, int nTZFlag): OGR_F_SetFieldDateTimeEx(cogr_feature, iField, nYear, nMonth, nDay, nHour, nMinute, fSecond, nTZFlag) cdef int get_field_as_datetime(void *cogr_feature, int iField, int* nYear, int* nMonth, int* nDay, int* nHour, int* nMinute, float* fSecond, int* nTZFlag): return OGR_F_GetFieldAsDateTimeEx(cogr_feature, iField, nYear, nMonth, nDay, nHour, nMinute, fSecond, nTZFlag) Fiona-1.8.21/fiona/_shim3.pxd000066400000000000000000000023541420023252700156330ustar00rootroot00000000000000include "ogrext3.pxd" cdef bint is_field_null(void *feature, int n) cdef void set_field_null(void *feature, int n) cdef void gdal_flush_cache(void *cogr_ds) cdef void* gdal_open_vector(const char *path_c, int mode, drivers, options) except NULL cdef void* gdal_create(void* cogr_driver, const char *path_c, options) except NULL cdef bint check_capability_transaction(void *cogr_ds) cdef OGRErr gdal_start_transaction(void *cogr_ds, int force) cdef OGRErr gdal_commit_transaction(void *cogr_ds) cdef OGRErr gdal_rollback_transaction(void *cogr_ds) cdef OGRFieldSubType get_field_subtype(void *fielddefn) cdef void set_field_subtype(void *fielddefn, OGRFieldSubType subtype) cdef bint check_capability_create_layer(void *cogr_ds) cdef void *get_linear_geometry(void *geom) cdef const char* osr_get_name(OGRSpatialReferenceH hSrs) cdef void osr_set_traditional_axis_mapping_strategy(OGRSpatialReferenceH hSrs) cdef void set_proj_search_path(object path) cdef void get_proj_version(int *, int *, int *) cdef void set_field_datetime(void *cogr_feature, int iField, int nYear, int nMonth, int nDay, int nHour, int nMinute, float fSecond, int nTZFlag) cdef int get_field_as_datetime(void *cogr_feature, int iField, int *, int *, int *, int *, int *, float *, int *) Fiona-1.8.21/fiona/_shim3.pyx000066400000000000000000000124501420023252700156560ustar00rootroot00000000000000"""Shims on top of ogrext for GDAL versions >= 3.0""" cdef extern from "ogr_api.h": int OGR_F_IsFieldNull(void *feature, int n) cdef extern from "ogr_srs_api.h" nogil: ctypedef enum OSRAxisMappingStrategy: OAMS_TRADITIONAL_GIS_ORDER const char* OSRGetName(OGRSpatialReferenceH hSRS) void OSRSetAxisMappingStrategy(OGRSpatialReferenceH hSRS, OSRAxisMappingStrategy) void OSRSetPROJSearchPaths(const char *const *papszPaths) from fiona.ogrext3 cimport * from fiona._err cimport exc_wrap_pointer from fiona._err import cpl_errs, CPLE_BaseError, FionaNullPointerError from fiona.errors import DriverError import logging log = logging.getLogger(__name__) cdef bint is_field_null(void *feature, int n): if OGR_F_IsFieldNull(feature, n): return True elif not OGR_F_IsFieldSet(feature, n): return True else: return False cdef void set_field_null(void *feature, int n): OGR_F_SetFieldNull(feature, n) cdef void gdal_flush_cache(void *cogr_ds): with cpl_errs: GDALFlushCache(cogr_ds) cdef void* gdal_open_vector(char* path_c, int mode, drivers, options) 
except NULL: cdef void* cogr_ds = NULL cdef char **drvs = NULL cdef void* drv = NULL cdef char **open_opts = NULL flags = GDAL_OF_VECTOR | GDAL_OF_VERBOSE_ERROR if mode == 1: flags |= GDAL_OF_UPDATE else: flags |= GDAL_OF_READONLY if drivers: for name in drivers: name_b = name.encode() name_c = name_b drv = GDALGetDriverByName(name_c) if drv != NULL: drvs = CSLAddString(drvs, name_c) for k, v in options.items(): if v is None: continue k = k.upper().encode('utf-8') if isinstance(v, bool): v = ('ON' if v else 'OFF').encode('utf-8') else: v = str(v).encode('utf-8') log.debug("Set option %r: %r", k, v) open_opts = CSLAddNameValue(open_opts, k, v) open_opts = CSLAddNameValue(open_opts, "VALIDATE_OPEN_OPTIONS", "NO") try: cogr_ds = exc_wrap_pointer( GDALOpenEx(path_c, flags, drvs, open_opts, NULL) ) return cogr_ds except FionaNullPointerError: raise DriverError("Failed to open dataset (mode={}): {}".format(mode, path_c.decode("utf-8"))) except CPLE_BaseError as exc: raise DriverError(str(exc)) finally: CSLDestroy(drvs) CSLDestroy(open_opts) cdef void* gdal_create(void* cogr_driver, const char *path_c, options) except NULL: cdef char **creation_opts = NULL cdef void *cogr_ds = NULL for k, v in options.items(): k = k.upper().encode('utf-8') if isinstance(v, bool): v = ('ON' if v else 'OFF').encode('utf-8') else: v = str(v).encode('utf-8') log.debug("Set option %r: %r", k, v) creation_opts = CSLAddNameValue(creation_opts, k, v) try: return exc_wrap_pointer(GDALCreate(cogr_driver, path_c, 0, 0, 0, GDT_Unknown, creation_opts)) except FionaNullPointerError: raise DriverError("Failed to create dataset: {}".format(path_c.decode("utf-8"))) except CPLE_BaseError as exc: raise DriverError(str(exc)) finally: CSLDestroy(creation_opts) cdef bint check_capability_transaction(void *cogr_ds): return GDALDatasetTestCapability(cogr_ds, ODsCTransactions) cdef OGRErr gdal_start_transaction(void* cogr_ds, int force): return GDALDatasetStartTransaction(cogr_ds, force) cdef OGRErr gdal_commit_transaction(void* cogr_ds): return GDALDatasetCommitTransaction(cogr_ds) cdef OGRErr gdal_rollback_transaction(void* cogr_ds): return GDALDatasetRollbackTransaction(cogr_ds) cdef OGRFieldSubType get_field_subtype(void *fielddefn): return OGR_Fld_GetSubType(fielddefn) cdef void set_field_subtype(void *fielddefn, OGRFieldSubType subtype): OGR_Fld_SetSubType(fielddefn, subtype) cdef bint check_capability_create_layer(void *cogr_ds): return GDALDatasetTestCapability(cogr_ds, ODsCCreateLayer) cdef void *get_linear_geometry(void *geom): return OGR_G_GetLinearGeometry(geom, 0.0, NULL) cdef const char* osr_get_name(OGRSpatialReferenceH hSrs): return OSRGetName(hSrs) cdef void osr_set_traditional_axis_mapping_strategy(OGRSpatialReferenceH hSrs): OSRSetAxisMappingStrategy(hSrs, OAMS_TRADITIONAL_GIS_ORDER) cdef void set_proj_search_path(object path): cdef char **paths = NULL cdef const char *path_c = NULL path_b = path.encode("utf-8") path_c = path_b paths = CSLAddString(paths, path_c) OSRSetPROJSearchPaths(paths) cdef void get_proj_version(int* major, int* minor, int* patch): OSRGetPROJVersion(major, minor, patch) cdef void set_field_datetime(void *cogr_feature, int iField, int nYear, int nMonth, int nDay, int nHour, int nMinute, float fSecond, int nTZFlag): OGR_F_SetFieldDateTimeEx(cogr_feature, iField, nYear, nMonth, nDay, nHour, nMinute, fSecond, nTZFlag) cdef int get_field_as_datetime(void *cogr_feature, int iField, int* nYear, int* nMonth, int* nDay, int* nHour, int* nMinute, float* fSecond, int* nTZFlag): return 
OGR_F_GetFieldAsDateTimeEx(cogr_feature, iField, nYear, nMonth, nDay, nHour, nMinute, fSecond, nTZFlag) Fiona-1.8.21/fiona/_show_versions.py000066400000000000000000000024461420023252700173570ustar00rootroot00000000000000import platform import sys import os import fiona from fiona._env import get_gdal_release_name, get_proj_version_tuple def show_versions(): """ Prints information useful for bug reports """ fiona_version = fiona.__version__ gdal_release_name = get_gdal_release_name() proj_version_tuple = get_proj_version_tuple() if proj_version_tuple is not None: proj_version = ".".join(map(str, proj_version_tuple)) else: proj_version = "Proj version not available" os_info = "{system} {release}".format(system=platform.system(), release=platform.release()) python_version = platform.python_version() python_exec = sys.executable msg = ("Fiona version: {fiona_version}" "\nGDAL version: {gdal_release_name}" "\nPROJ version: {proj_version}" "\n" "\nOS: {os_info}" "\nPython: {python_version}" "\nPython executable: '{python_exec}'" "\n" ) print(msg.format(fiona_version=fiona_version, gdal_release_name=gdal_release_name, proj_version=proj_version, os_info=os_info, python_version=python_version, python_exec=python_exec)) Fiona-1.8.21/fiona/_transform.pyx000066400000000000000000000167631420023252700166610ustar00rootroot00000000000000# distutils: language = c++ # # Coordinate and geometry transformations. from __future__ import absolute_import import logging from fiona cimport _cpl, _crs, _csl, _geometry from fiona._crs cimport OGRSpatialReferenceH from fiona._shim cimport osr_set_traditional_axis_mapping_strategy from fiona.compat import UserDict cdef extern from "ogr_geometry.h" nogil: cdef cppclass OGRGeometry: pass cdef cppclass OGRGeometryFactory: void * transformWithOptions(void *geom, void *ct, char **options) cdef extern from "ogr_spatialref.h": cdef cppclass OGRCoordinateTransformation: pass log = logging.getLogger(__name__) class NullHandler(logging.Handler): def emit(self, record): pass log.addHandler(NullHandler()) cdef void *_crs_from_crs(object crs): cdef char *proj_c = NULL cdef OGRSpatialReferenceH osr = NULL osr = _crs.OSRNewSpatialReference(NULL) if osr == NULL: raise ValueError("NULL spatial reference") params = [] # Normally, we expect a CRS dict. if isinstance(crs, UserDict): crs = dict(crs) if isinstance(crs, dict): # EPSG is a special case. init = crs.get('init') if init: auth, val = init.split(':') if auth.upper() == 'EPSG': _crs.OSRImportFromEPSG(osr, int(val)) else: crs['wktext'] = True for k, v in crs.items(): if v is True or (k in ('no_defs', 'wktext') and v): params.append("+%s" % k) else: params.append("+%s=%s" % (k, v)) proj = " ".join(params) log.debug("PROJ.4 to be imported: %r", proj) proj_b = proj.encode('utf-8') proj_c = proj_b _crs.OSRImportFromProj4(osr, proj_c) # Fall back for CRS strings like "EPSG:3857." 
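    # OSRSetFromUserInput accepts more than "AUTH:CODE" strings: WKT and
    # PROJ.4 parameter strings are also recognized, so those forms of
    # ``crs`` are handled by this branch as well.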
else: proj_b = crs.encode('utf-8') proj_c = proj_b _crs.OSRSetFromUserInput(osr, proj_c) osr_set_traditional_axis_mapping_strategy(osr) return osr def _transform(src_crs, dst_crs, xs, ys): cdef double *x cdef double *y cdef char *proj_c = NULL cdef OGRSpatialReferenceH src = NULL cdef OGRSpatialReferenceH dst = NULL cdef void *transform = NULL cdef int i assert len(xs) == len(ys) src = _crs_from_crs(src_crs) dst = _crs_from_crs(dst_crs) n = len(xs) x = _cpl.CPLMalloc(n*sizeof(double)) y = _cpl.CPLMalloc(n*sizeof(double)) for i in range(n): x[i] = xs[i] y[i] = ys[i] transform = _crs.OCTNewCoordinateTransformation(src, dst) res = _crs.OCTTransform(transform, n, x, y, NULL) res_xs = [0]*n res_ys = [0]*n for i in range(n): res_xs[i] = x[i] res_ys[i] = y[i] _cpl.CPLFree(x) _cpl.CPLFree(y) _crs.OCTDestroyCoordinateTransformation(transform) _crs.OSRRelease(src) _crs.OSRRelease(dst) return res_xs, res_ys def _transform_geom( src_crs, dst_crs, geom, antimeridian_cutting, antimeridian_offset, precision): """Return a transformed geometry.""" cdef char *proj_c = NULL cdef char *key_c = NULL cdef char *val_c = NULL cdef char **options = NULL cdef OGRSpatialReferenceH src = NULL cdef OGRSpatialReferenceH dst = NULL cdef void *transform = NULL cdef OGRGeometryFactory *factory = NULL cdef void *src_ogr_geom = NULL cdef void *dst_ogr_geom = NULL cdef int i if src_crs and dst_crs: src = _crs_from_crs(src_crs) dst = _crs_from_crs(dst_crs) transform = _crs.OCTNewCoordinateTransformation(src, dst) # Transform options. options = _csl.CSLSetNameValue( options, "DATELINEOFFSET", str(antimeridian_offset).encode('utf-8')) if antimeridian_cutting: options = _csl.CSLSetNameValue(options, "WRAPDATELINE", "YES") factory = new OGRGeometryFactory() src_ogr_geom = _geometry.OGRGeomBuilder().build(geom) dst_ogr_geom = factory.transformWithOptions( src_ogr_geom, transform, options) g = _geometry.GeomBuilder().build(dst_ogr_geom) _geometry.OGR_G_DestroyGeometry(dst_ogr_geom) _geometry.OGR_G_DestroyGeometry(src_ogr_geom) _crs.OCTDestroyCoordinateTransformation(transform) if options != NULL: _csl.CSLDestroy(options) _crs.OSRRelease(src) _crs.OSRRelease(dst) else: g = geom if precision >= 0: def round_point(g): coords = list(g['coordinates']) x, y = coords[:2] x = round(x, precision) y = round(y, precision) new_coords = [x, y] if len(coords) == 3: z = coords[2] new_coords.append(round(z, precision)) return new_coords def round_linestring(g): coords = list(zip(*g['coordinates'])) xp, yp = coords[:2] xp = [round(v, precision) for v in xp] yp = [round(v, precision) for v in yp] if len(coords) == 3: zp = coords[2] zp = [round(v, precision) for v in zp] new_coords = list(zip(xp, yp, zp)) else: new_coords = list(zip(xp, yp)) return new_coords def round_polygon(g): new_coords = [] for piece in g['coordinates']: coords = list(zip(*piece)) xp, yp = coords[:2] xp = [round(v, precision) for v in xp] yp = [round(v, precision) for v in yp] if len(coords) == 3: zp = coords[2] zp = [round(v, precision) for v in zp] new_coords.append(list(zip(xp, yp, zp))) else: new_coords.append(list(zip(xp, yp))) return new_coords def round_multipolygon(g): parts = g['coordinates'] new_coords = [] for part in parts: inner_coords = [] for ring in part: coords = list(zip(*ring)) xp, yp = coords[:2] xp = [round(v, precision) for v in xp] yp = [round(v, precision) for v in yp] if len(coords) == 3: zp = coords[2] zp = [round(v, precision) for v in zp] inner_coords.append(list(zip(xp, yp, zp))) else: inner_coords.append(list(zip(xp, yp))) 
new_coords.append(inner_coords) return new_coords def round_geometry(g): if g['type'] == 'Point': g['coordinates'] = round_point(g) elif g['type'] in ['LineString', 'MultiPoint']: g['coordinates'] = round_linestring(g) elif g['type'] in ['Polygon', 'MultiLineString']: g['coordinates'] = round_polygon(g) elif g['type'] == 'MultiPolygon': g['coordinates'] = round_multipolygon(g) else: raise RuntimeError("Unsupported geometry type: {}".format(g['type'])) if g['type'] == 'GeometryCollection': for _g in g['geometries']: round_geometry(_g) else: round_geometry(g) return g Fiona-1.8.21/fiona/collection.py000066400000000000000000000531751420023252700164500ustar00rootroot00000000000000# -*- coding: utf-8 -*- # Collections provide file-like access to feature data import logging import os import warnings import fiona._loading with fiona._loading.add_gdal_dll_directories(): from fiona import compat, vfs from fiona.ogrext import Iterator, ItemsIterator, KeysIterator from fiona.ogrext import Session, WritingSession from fiona.ogrext import buffer_to_virtual_file, remove_virtual_file, GEOMETRY_TYPES from fiona.errors import (DriverError, SchemaError, CRSError, UnsupportedGeometryTypeError, DriverSupportError) from fiona.logutils import FieldSkipLogFilter from fiona._crs import crs_to_wkt from fiona._env import get_gdal_release_name, get_gdal_version_tuple from fiona.env import env_ctx_if_needed from fiona.errors import FionaDeprecationWarning from fiona.drvsupport import (supported_drivers, driver_mode_mingdal, _driver_converts_field_type_silently_to_str, _driver_supports_field) from fiona.path import Path, vsi_path, parse_path from six import string_types, binary_type log = logging.getLogger(__name__) class Collection(object): """A file-like interface to features of a vector dataset Python text file objects are iterators over lines of a file. Fiona Collections are similar iterators (not lists!) over features represented as GeoJSON-like mappings. """ def __init__(self, path, mode='r', driver=None, schema=None, crs=None, encoding=None, layer=None, vsi=None, archive=None, enabled_drivers=None, crs_wkt=None, ignore_fields=None, ignore_geometry=False, **kwargs): """The required ``path`` is the absolute or relative path to a file, such as '/data/test_uk.shp'. In ``mode`` 'r', data can be read only. In ``mode`` 'a', data can be appended to a file. In ``mode`` 'w', data overwrites the existing contents of a file. In ``mode`` 'w', an OGR ``driver`` name and a ``schema`` are required. A Proj4 ``crs`` string is recommended. If both ``crs`` and ``crs_wkt`` keyword arguments are passed, the latter will trump the former. In 'w' mode, kwargs will be mapped to OGR layer creation options. 
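        A minimal write-mode sketch (the path, schema fields, and CRS
        here are illustrative only):

            with fiona.open(
                    "/tmp/example.shp", "w",
                    driver="ESRI Shapefile",
                    schema={"geometry": "Point",
                            "properties": {"name": "str"}},
                    crs="EPSG:4326") as dst:
                dst.write({
                    "geometry": {"type": "Point", "coordinates": (0.0, 0.0)},
                    "properties": {"name": "example"}})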
""" if not isinstance(path, (string_types, Path)): raise TypeError("invalid path: %r" % path) if not isinstance(mode, string_types) or mode not in ('r', 'w', 'a'): raise TypeError("invalid mode: %r" % mode) if driver and not isinstance(driver, string_types): raise TypeError("invalid driver: %r" % driver) if schema and not hasattr(schema, 'get'): raise TypeError("invalid schema: %r" % schema) if crs and not isinstance(crs, compat.DICT_TYPES + string_types): raise TypeError("invalid crs: %r" % crs) if crs_wkt and not isinstance(crs_wkt, string_types): raise TypeError("invalid crs_wkt: %r" % crs_wkt) if encoding and not isinstance(encoding, string_types): raise TypeError("invalid encoding: %r" % encoding) if layer and not isinstance(layer, tuple(list(string_types) + [int])): raise TypeError("invalid name: %r" % layer) if vsi: if not isinstance(vsi, string_types) or not vfs.valid_vsi(vsi): raise TypeError("invalid vsi: %r" % vsi) if archive and not isinstance(archive, string_types): raise TypeError("invalid archive: %r" % archive) self.session = None self.iterator = None self._len = 0 self._bounds = None self._driver = None self._schema = None self._crs = None self._crs_wkt = None self.env = None self.enabled_drivers = enabled_drivers self.ignore_fields = ignore_fields self.ignore_geometry = bool(ignore_geometry) # Check GDAL version against drivers if driver in driver_mode_mingdal[mode] and get_gdal_version_tuple() < driver_mode_mingdal[mode][driver]: min_gdal_version = ".".join(list(map(str, driver_mode_mingdal[mode][driver]))) raise DriverError( "{driver} driver requires at least GDAL {min_gdal_version} for mode '{mode}', " "Fiona was compiled against: {gdal}".format(driver=driver, mode=mode, min_gdal_version=min_gdal_version, gdal=get_gdal_release_name())) if vsi: self.path = vfs.vsi_path(path, vsi, archive) path = parse_path(self.path) else: path = parse_path(path) self.path = vsi_path(path) if mode == 'w': if layer and not isinstance(layer, string_types): raise ValueError("in 'w' mode, layer names must be strings") if driver == 'GeoJSON': if layer is not None: raise ValueError("the GeoJSON format does not have layers") self.name = 'OgrGeoJSON' # TODO: raise ValueError as above for other single-layer formats. 
else: self.name = layer or os.path.basename(os.path.splitext(path.path)[0]) else: if layer in (0, None): self.name = 0 else: self.name = layer or os.path.basename(os.path.splitext(path)[0]) self.mode = mode if self.mode == 'w': if driver == 'Shapefile': driver = 'ESRI Shapefile' if not driver: raise DriverError("no driver") elif driver not in supported_drivers: raise DriverError( "unsupported driver: %r" % driver) elif self.mode not in supported_drivers[driver]: raise DriverError( "unsupported mode: %r" % self.mode) self._driver = driver if not schema: raise SchemaError("no schema") elif 'properties' not in schema: raise SchemaError("schema lacks: properties") elif 'geometry' not in schema: raise SchemaError("schema lacks: geometry") self._schema = schema self._check_schema_driver_support() if crs_wkt or crs: self._crs_wkt = crs_to_wkt(crs_wkt or crs) self._driver = driver kwargs.update(encoding=encoding) self.encoding = encoding try: if self.mode == 'r': self.session = Session() self.session.start(self, **kwargs) elif self.mode in ('a', 'w'): self.session = WritingSession() self.session.start(self, **kwargs) except IOError: self.session = None raise if self.session is not None: self.guard_driver_mode() if self.mode in ("a", "w"): self._valid_geom_types = _get_valid_geom_types(self.schema, self.driver) self.field_skip_log_filter = FieldSkipLogFilter() def __repr__(self): return "<%s Collection '%s', mode '%s' at %s>" % ( self.closed and "closed" or "open", self.path + ":" + str(self.name), self.mode, hex(id(self))) def guard_driver_mode(self): driver = self.session.get_driver() if driver not in supported_drivers: raise DriverError("unsupported driver: %r" % driver) if self.mode not in supported_drivers[driver]: raise DriverError("unsupported mode: %r" % self.mode) @property def driver(self): """Returns the name of the proper OGR driver.""" if not self._driver and self.mode in ("a", "r") and self.session: self._driver = self.session.get_driver() return self._driver @property def schema(self): """Returns a mapping describing the data schema. The mapping has 'geometry' and 'properties' items. The former is a string such as 'Point' and the latter is an ordered mapping that follows the order of fields in the data file. """ if not self._schema and self.mode in ("a", "r") and self.session: self._schema = self.session.get_schema() return self._schema @property def crs(self): """Returns a Proj4 string.""" if self._crs is None and self.session: self._crs = self.session.get_crs() return self._crs @property def crs_wkt(self): """Returns a WKT string.""" if self._crs_wkt is None and self.session: self._crs_wkt = self.session.get_crs_wkt() return self._crs_wkt @property def meta(self): """Returns a mapping with the driver, schema, crs, and additional properties.""" return { 'driver': self.driver, 'schema': self.schema, 'crs': self.crs, 'crs_wkt': self.crs_wkt} profile = meta def filter(self, *args, **kwds): """Returns an iterator over records, but filtered by a test for spatial intersection with the provided ``bbox``, a (minx, miny, maxx, maxy) tuple or a geometry ``mask``. Positional arguments ``stop`` or ``start, stop[, step]`` allows iteration to skip over items or stop at a specific item. Note: spatial filtering using ``mask`` may be inaccurate and returning all features overlapping the envelope of ``mask``. 
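        Example (the coordinates are illustrative):

            for feature in collection.filter(bbox=(-105.0, 39.0, -104.0, 40.0)):
                print(feature['id'])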
""" if self.closed: raise ValueError("I/O operation on closed collection") elif self.mode != 'r': raise IOError("collection not open for reading") if args: s = slice(*args) start = s.start stop = s.stop step = s.step else: start = stop = step = None bbox = kwds.get('bbox') mask = kwds.get('mask') if bbox and mask: raise ValueError("mask and bbox can not be set together") self.iterator = Iterator( self, start, stop, step, bbox, mask) return self.iterator def items(self, *args, **kwds): """Returns an iterator over FID, record pairs, optionally filtered by a test for spatial intersection with the provided ``bbox``, a (minx, miny, maxx, maxy) tuple or a geometry ``mask``. Positional arguments ``stop`` or ``start, stop[, step]`` allows iteration to skip over items or stop at a specific item. Note: spatial filtering using ``mask`` may be inaccurate and returning all features overlapping the envelope of ``mask``. """ if self.closed: raise ValueError("I/O operation on closed collection") elif self.mode != 'r': raise IOError("collection not open for reading") if args: s = slice(*args) start = s.start stop = s.stop step = s.step else: start = stop = step = None bbox = kwds.get('bbox') mask = kwds.get('mask') if bbox and mask: raise ValueError("mask and bbox can not be set together") self.iterator = ItemsIterator( self, start, stop, step, bbox, mask) return self.iterator def keys(self, *args, **kwds): """Returns an iterator over FIDs, optionally filtered by a test for spatial intersection with the provided ``bbox``, a (minx, miny, maxx, maxy) tuple or a geometry ``mask``. Positional arguments ``stop`` or ``start, stop[, step]`` allows iteration to skip over items or stop at a specific item. Note: spatial filtering using ``mask`` may be inaccurate and returning all features overlapping the envelope of ``mask``. """ if self.closed: raise ValueError("I/O operation on closed collection") elif self.mode != 'r': raise IOError("collection not open for reading") if args: s = slice(*args) start = s.start stop = s.stop step = s.step else: start = stop = step = None bbox = kwds.get('bbox') mask = kwds.get('mask') if bbox and mask: raise ValueError("mask and bbox can not be set together") self.iterator = KeysIterator( self, start, stop, step, bbox, mask) return self.iterator def __contains__(self, fid): return self.session.has_feature(fid) values = filter def __iter__(self): """Returns an iterator over records.""" return self.filter() def __next__(self): """Returns next record from iterator.""" warnings.warn("Collection.__next__() is buggy and will be removed in " "Fiona 2.0. Switch to `next(iter(collection))`.", FionaDeprecationWarning, stacklevel=2) if not self.iterator: iter(self) return next(self.iterator) next = __next__ def __getitem__(self, item): return self.session.__getitem__(item) def get(self, item): return self.session.get(item) def writerecords(self, records): """Stages multiple records for writing to disk.""" if self.closed: raise ValueError("I/O operation on closed collection") if self.mode not in ('a', 'w'): raise IOError("collection not open for writing") self.session.writerecs(records, self) self._len = self.session.get_length() self._bounds = None def write(self, record): """Stages a record for writing to disk.""" self.writerecords([record]) def validate_record(self, record): """Compares the record to the collection's schema. Returns ``True`` if the record matches, else ``False``. """ # Currently we only compare keys of properties, not the types of # values. 
return ( set(record['properties'].keys()) == set(self.schema['properties'].keys()) and self.validate_record_geometry(record)) def validate_record_geometry(self, record): """Compares the record's geometry to the collection's schema. Returns ``True`` if the record matches, else ``False``. """ # Shapefiles welcome mixes of line/multis and polygon/multis. # OGR reports these mixed files as type "Polygon" or "LineString" # but will return either these or their multi counterparts when # reading features. if (self.driver == "ESRI Shapefile" and "Point" not in record['geometry']['type']): return record['geometry']['type'].lstrip( "Multi") == self.schema['geometry'].lstrip("3D ").lstrip( "Multi") else: return ( record['geometry']['type'] == self.schema['geometry'].lstrip("3D ")) def __len__(self): if self._len <= 0 and self.session is not None: self._len = self.session.get_length() if self._len < 0: # Raise TypeError when we don't know the length so that Python # will treat Collection as a generator raise TypeError("Layer does not support counting") return self._len @property def bounds(self): """Returns (minx, miny, maxx, maxy).""" if self._bounds is None and self.session is not None: self._bounds = self.session.get_extent() return self._bounds def _check_schema_driver_support(self): """Check support for the schema against the driver See GH#572 for discussion. """ gdal_version_major = get_gdal_version_tuple().major for field in self._schema["properties"].values(): field_type = field.split(":")[0] if not _driver_supports_field(self.driver, field_type): if self.driver == 'GPKG' and gdal_version_major < 2 and field_type == "datetime": raise DriverSupportError("GDAL 1.x GPKG driver does not support datetime fields") else: raise DriverSupportError("{driver} does not support {field_type} " "fields".format(driver=self.driver, field_type=field_type)) elif field_type in {'time', 'datetime', 'date'} and _driver_converts_field_type_silently_to_str(self.driver, field_type): if self._driver == "GeoJSON" and gdal_version_major < 2 and field_type in {'datetime', 'date'}: warnings.warn("GeoJSON driver in GDAL 1.x silently converts {} to string" " in non-standard format".format(field_type)) else: warnings.warn("{driver} driver silently converts {field_type} " "to string".format(driver=self.driver, field_type=field_type)) def flush(self): """Flush the buffer.""" if self.session is not None: self.session.sync(self) new_len = self.session.get_length() self._len = new_len > self._len and new_len or self._len self._bounds = None def close(self): """In append or write mode, flushes data to disk, then ends access.""" if self.session is not None and self.session.isactive(): if self.mode in ('a', 'w'): self.flush() log.debug("Flushed buffer") self.session.stop() log.debug("Stopped session") self.session = None self.iterator = None if self.env: self.env.__exit__() @property def closed(self): """``False`` if data can be accessed, otherwise ``True``.""" return self.session is None def __enter__(self): logging.getLogger('fiona.ogrext').addFilter(self.field_skip_log_filter) self._env = env_ctx_if_needed() self._env.__enter__() return self def __exit__(self, type, value, traceback): logging.getLogger('fiona.ogrext').removeFilter(self.field_skip_log_filter) self._env.__exit__() self.close() def __del__(self): # Note: you can't count on this being called. Call close() explicitly # or use the context manager protocol ("with"). 
self.close() ALL_GEOMETRY_TYPES = set([ geom_type for geom_type in GEOMETRY_TYPES.values() if "3D " not in geom_type and geom_type != "None"]) ALL_GEOMETRY_TYPES.add("None") def _get_valid_geom_types(schema, driver): """Returns a set of geometry types the schema will accept""" schema_geom_type = schema["geometry"] if isinstance(schema_geom_type, string_types) or schema_geom_type is None: schema_geom_type = (schema_geom_type,) valid_types = set() for geom_type in schema_geom_type: geom_type = str(geom_type).lstrip("3D ") if geom_type == "Unknown" or geom_type == "Any": valid_types.update(ALL_GEOMETRY_TYPES) else: if geom_type not in ALL_GEOMETRY_TYPES: raise UnsupportedGeometryTypeError(geom_type) valid_types.add(geom_type) # shapefiles don't differentiate between single/multi geometries, except points if driver == "ESRI Shapefile" and "Point" not in valid_types: for geom_type in list(valid_types): if not geom_type.startswith("Multi"): valid_types.add("Multi" + geom_type) return valid_types def get_filetype(bytesbuf): """Detect compression type of bytesbuf. ZIP only. TODO: add others relevant to GDAL/OGR.""" if bytesbuf[:4].startswith(b'PK\x03\x04'): return 'zip' else: return '' class BytesCollection(Collection): """BytesCollection takes a buffer of bytes and maps that to a virtual file that can then be opened by fiona. """ def __init__(self, bytesbuf, **kwds): """Takes a buffer of bytes whose contents are something we'd like to open with Fiona and maps it to a virtual file. """ if not isinstance(bytesbuf, binary_type): raise ValueError("input buffer must be bytes") # Hold a reference to the buffer, as bad things will happen if # it is garbage collected while in use. self.bytesbuf = bytesbuf # Map the buffer to a file. If the buffer contains a zipfile # we take extra steps in naming the buffer and in opening # it. If the requested driver is for GeoJSON, we append an # appropriate extension to ensure the driver reads it. filetype = get_filetype(self.bytesbuf) ext = '' if filetype == 'zip': ext = '.zip' elif kwds.get('driver') == "GeoJSON": ext = '.json' self.virtual_file = buffer_to_virtual_file(self.bytesbuf, ext=ext) # Instantiate the parent class.
super(BytesCollection, self).__init__(self.virtual_file, vsi=filetype, **kwds) def close(self): """Removes the virtual file associated with the class.""" super(BytesCollection, self).close() if self.virtual_file: remove_virtual_file(self.virtual_file) self.virtual_file = None self.bytesbuf = None def __repr__(self): return "<%s BytesCollection '%s', mode '%s' at %s>" % ( self.closed and "closed" or "open", self.path + ":" + str(self.name), self.mode, hex(id(self))) Fiona-1.8.21/fiona/compat.py000066400000000000000000000014101420023252700155610ustar00rootroot00000000000000import sys import collections try: from collections import OrderedDict except ImportError: from ordereddict import OrderedDict if sys.version_info[0] >= 3: from urllib.parse import urlparse from collections import UserDict from inspect import getfullargspec as getargspec else: from urlparse import urlparse from UserDict import UserDict from inspect import getargspec if sys.version_info >= (3, 3): from collections.abc import Mapping else: from collections import Mapping # Users can pass in objects that subclass a few different objects # More specifically, rasterio has a CRS() class that subclasses UserDict() # In Python 2 UserDict() is in its own module and does not subclass Mapping() DICT_TYPES = (dict, Mapping, UserDict) Fiona-1.8.21/fiona/crs.py000066400000000000000000000124321420023252700150730ustar00rootroot00000000000000"""Coordinate reference systems and functions PROJ.4 is the law of this land: http://proj.osgeo.org/. But whereas PROJ.4 coordinate reference systems are described by strings of parameters such as +proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs here we use mappings: {'proj': 'longlat', 'ellps': 'WGS84', 'datum': 'WGS84', 'no_defs': True} """ from six import string_types def to_string(crs): """Turn a parameter mapping into a more conventional PROJ.4 string. Mapping keys are tested against the ``all_proj_keys`` list. Values of ``True`` are omitted, leaving the key bare: {'no_defs': True} -> "+no_defs" and items where the value is otherwise not a str, int, or float are omitted. """ items = [] for k, v in sorted(filter( lambda x: x[0] in all_proj_keys and x[1] is not False and ( isinstance(x[1], (bool, int, float)) or isinstance(x[1], string_types)), crs.items())): items.append( "+" + "=".join( map(str, filter( lambda y: (y or y == 0) and y is not True, (k, v))))) return " ".join(items) def from_string(prjs): """Turn a PROJ.4 string into a mapping of parameters. Bare parameters like "+no_defs" are given a value of ``True``. All keys are checked against the ``all_proj_keys`` list. """ parts = [o.lstrip('+') for o in prjs.strip().split()] def parse(v): try: return int(v) except ValueError: pass try: return float(v) except ValueError: return v items = map( lambda kv: len(kv) == 2 and (kv[0], parse(kv[1])) or (kv[0], True), (p.split('=') for p in parts)) return dict((k, v) for k, v in items if k in all_proj_keys) def from_epsg(code): """Given an integer code, returns an EPSG-like mapping. Note: the input code is not validated against an EPSG database. """ if int(code) <= 0: raise ValueError("EPSG codes are positive integers") return {'init': "epsg:%s" % code, 'no_defs': True} # Below is the big list of PROJ4 parameters from # http://trac.osgeo.org/proj/wiki/GenParms. # It is parsed into a list of paramter keys ``all_proj_keys``. _param_data = """ +a Semimajor radius of the ellipsoid axis +alpha ? 
Used with Oblique Mercator and possibly a few others +axis Axis orientation (new in 4.8.0) +b Semiminor radius of the ellipsoid axis +datum Datum name (see `proj -ld`) +ellps Ellipsoid name (see `proj -le`) +init Initialize from a named CRS +k Scaling factor (old name) +k_0 Scaling factor (new name) +lat_0 Latitude of origin +lat_1 Latitude of first standard parallel +lat_2 Latitude of second standard parallel +lat_ts Latitude of true scale +lon_0 Central meridian +lonc ? Longitude used with Oblique Mercator and possibly a few others +lon_wrap Center longitude to use for wrapping (see below) +nadgrids Filename of NTv2 grid file to use for datum transforms (see below) +no_defs Don't use the /usr/share/proj/proj_def.dat defaults file +over Allow longitude output outside -180 to 180 range, disables wrapping (see below) +pm Alternate prime meridian (typically a city name, see below) +proj Projection name (see `proj -l`) +south Denotes southern hemisphere UTM zone +to_meter Multiplier to convert map units to 1.0m +towgs84 3 or 7 term datum transform parameters (see below) +units meters, US survey feet, etc. +vto_meter vertical conversion to meters. +vunits vertical units. +x_0 False easting +y_0 False northing +zone UTM zone +a Semimajor radius of the ellipsoid axis +alpha ? Used with Oblique Mercator and possibly a few others +azi +b Semiminor radius of the ellipsoid axis +belgium +beta +czech +e Eccentricity of the ellipsoid = sqrt(1 - b^2/a^2) = sqrt( f*(2-f) ) +ellps Ellipsoid name (see `proj -le`) +es Eccentricity of the ellipsoid squared +f Flattening of the ellipsoid (often presented as an inverse, e.g. 1/298) +gamma +geoc +guam +h +k Scaling factor (old name) +K +k_0 Scaling factor (new name) +lat_0 Latitude of origin +lat_1 Latitude of first standard parallel +lat_2 Latitude of second standard parallel +lat_b +lat_t +lat_ts Latitude of true scale +lon_0 Central meridian +lon_1 +lon_2 +lonc ? Longitude used with Oblique Mercator and possibly a few others +lsat +m +M +n +no_cut +no_off +no_rot +ns +o_alpha +o_lat_1 +o_lat_2 +o_lat_c +o_lat_p +o_lon_1 +o_lon_2 +o_lon_c +o_lon_p +o_proj +over +p +path +proj Projection name (see `proj -l`) +q +R +R_a +R_A Compute radius such that the area of the sphere is the same as the area of the ellipsoid +rf Reciprocal of the ellipsoid flattening term (e.g. 298) +R_g +R_h +R_lat_a +R_lat_g +rot +R_V +s +south Denotes southern hemisphere UTM zone +sym +t +theta +tilt +to_meter Multiplier to convert map units to 1.0m +units meters, US survey feet, etc. +vopt +W +westo +x_0 False easting +y_0 False northing +zone UTM zone +wktext Marker """ _lines = filter(lambda x: len(x) > 1, _param_data.split("\n")) all_proj_keys = list( set(line.split()[0].lstrip("+").strip() for line in _lines)) + ['no_mayo'] Fiona-1.8.21/fiona/drvsupport.py000066400000000000000000000276511420023252700165450ustar00rootroot00000000000000# -*- coding: utf-8 -*- from fiona.env import Env from fiona._env import get_gdal_version_num, calc_gdal_version_num # Here is the list of available drivers as (name, modes) tuples. Currently, # we only expose the defaults (excepting FileGDB). We also don't expose # the CSV or GeoJSON drivers. Use Python's csv and json modules instead. # Might still exclude a few more of these after making a pass through the # entries for each at https://gdal.org/drivers/vector/index.html to screen # out the multi-layer formats. 
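# A quick, illustrative way to see what a given build can do with a
# format is to look it up in this mapping:
#
#     from fiona.drvsupport import supported_drivers
#     supported_drivers["ESRI Shapefile"]  # "raw": read/append/write
#     supported_drivers["GPX"]             # "rw": read/write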
supported_drivers = dict([ # OGR Vector Formats # Format Name Code Creation Georeferencing Compiled by default # Aeronav FAA files AeronavFAA No Yes Yes ("AeronavFAA", "r"), # ESRI ArcObjects ArcObjects No Yes No, needs ESRI ArcObjects # Arc/Info Binary Coverage AVCBin No Yes Yes # multi-layer # ("AVCBin", "r"), # Arc/Info .E00 (ASCII) Coverage AVCE00 No Yes Yes # multi-layer # ("AVCE00", "r"), # Arc/Info Generate ARCGEN No No Yes ("ARCGEN", "r"), # Atlas BNA BNA Yes No Yes ("BNA", "rw"), # AutoCAD DWG DWG No No No # AutoCAD DXF DXF Yes No Yes ("DXF", "rw"), # Comma Separated Value (.csv) CSV Yes No Yes ("CSV", "raw"), # CouchDB / GeoCouch CouchDB Yes Yes No, needs libcurl # DODS/OPeNDAP DODS No Yes No, needs libdap # EDIGEO EDIGEO No Yes Yes # multi-layer? Hard to tell from the OGR docs # ("EDIGEO", "r"), # ElasticSearch ElasticSearch Yes (write-only) - No, needs libcurl # ESRI FileGDB FileGDB Yes Yes No, needs FileGDB API library # multi-layer ("FileGDB", "raw"), ("OpenFileGDB", "r"), # ESRI Personal GeoDatabase PGeo No Yes No, needs ODBC library # ESRI ArcSDE SDE No Yes No, needs ESRI SDE # ESRIJSON ESRIJSON No Yes Yes ("ESRIJSON", "r"), # ESRI Shapefile ESRI Shapefile Yes Yes Yes ("ESRI Shapefile", "raw"), # FMEObjects Gateway FMEObjects Gateway No Yes No, needs FME ("FlatGeobuf", "rw"), # GeoJSON GeoJSON Yes Yes Yes ("GeoJSON", "raw"), # GeoJSONSeq GeoJSON sequences Yes Yes Yes ("GeoJSONSeq", "rw"), # Géoconcept Export Geoconcept Yes Yes Yes # multi-layers # ("Geoconcept", "raw"), # Geomedia .mdb Geomedia No No No, needs ODBC library # GeoPackage GPKG Yes Yes No, needs libsqlite3 ("GPKG", "raw"), # GeoRSS GeoRSS Yes Yes Yes (read support needs libexpat) # Google Fusion Tables GFT Yes Yes No, needs libcurl # GML GML Yes Yes Yes (read support needs Xerces or libexpat) ("GML", "rw"), # GMT GMT Yes Yes Yes ("GMT", "rw"), # GMT renamed to OGR_GMT for GDAL 2.x ("OGR_GMT", "rw"), # GPSBabel GPSBabel Yes Yes Yes (needs GPSBabel and GPX driver) # GPX GPX Yes Yes Yes (read support needs libexpat) ("GPX", "rw"), # GRASS GRASS No Yes No, needs libgrass # GPSTrackMaker (.gtm, .gtz) GPSTrackMaker Yes Yes Yes ("GPSTrackMaker", "rw"), # Hydrographic Transfer Format HTF No Yes Yes # TODO: Fiona is not ready for multi-layer formats: ("HTF", "r"), # Idrisi Vector (.VCT) Idrisi No Yes Yes ("Idrisi", "r"), # Informix DataBlade IDB Yes Yes No, needs Informix DataBlade # INTERLIS "Interlis 1" and "Interlis 2" Yes Yes No, needs Xerces (INTERLIS model reading needs ili2c.jar) # INGRES INGRES Yes No No, needs INGRESS # KML KML Yes Yes Yes (read support needs libexpat) # LIBKML LIBKML Yes Yes No, needs libkml # Mapinfo File MapInfo File Yes Yes Yes ("MapInfo File", "raw"), # Microstation DGN DGN Yes No Yes ("DGN", "raw"), # Access MDB (PGeo and Geomedia capable) MDB No Yes No, needs JDK/JRE # Memory Memory Yes Yes Yes # MySQL MySQL No Yes No, needs MySQL library # NAS - ALKIS NAS No Yes No, needs Xerces # Oracle Spatial OCI Yes Yes No, needs OCI library # ODBC ODBC No Yes No, needs ODBC library # MS SQL Spatial MSSQLSpatial Yes Yes No, needs ODBC library # Open Document Spreadsheet ODS Yes No No, needs libexpat # OGDI Vectors (VPF, VMAP, DCW) OGDI No Yes No, needs OGDI library # OpenAir OpenAir No Yes Yes # multi-layer # ("OpenAir", "r"), # PCI Geomatics Database File PCIDSK No No Yes, using internal PCIDSK SDK (from GDAL 1.7.0) ("PCIDSK", "raw"), # PDS PDS No Yes Yes ("PDS", "r"), # PDS renamed to OGR_PDS for GDAL 2.x ("OGR_PDS", "r"), # PGDump PostgreSQL SQL dump Yes Yes Yes # PostgreSQL/PostGIS 
PostgreSQL/PostGIS Yes Yes No, needs PostgreSQL client library (libpq) # EPIInfo .REC REC No No Yes # S-57 (ENC) S57 No Yes Yes # multi-layer ("S57", "r"), # SDTS SDTS No Yes Yes # multi-layer # ("SDTS", "r"), # SEG-P1 / UKOOA P1/90 SEGUKOOA No Yes Yes # multi-layers # ("SEGUKOOA", "r"), # SEG-Y SEGY No No Yes ("SEGY", "r"), # Norwegian SOSI Standard SOSI No Yes No, needs FYBA library # SQLite/SpatiaLite SQLite Yes Yes No, needs libsqlite3 or libspatialite ("SQLite", "raw"), # SUA SUA No Yes Yes ("SUA", "r"), # SVG SVG No Yes No, needs libexpat # TopoJSON TopoJSON No Yes Yes ("TopoJSON", "r"), # UK .NTF UK. NTF No Yes Yes # multi-layer # ("UK. NTF", "r"), # U.S. Census TIGER/Line TIGER No Yes Yes # multi-layer # ("TIGER", "r"), # VFK data VFK No Yes Yes # multi-layer # ("VFK", "r"), # VRT - Virtual Datasource VRT No Yes Yes # multi-layer # ("VRT", "r"), # OGC WFS (Web Feature Service) WFS Yes Yes No, needs libcurl # MS Excel format XLS No No No, needs libfreexl # Office Open XML spreadsheet XLSX Yes No No, needs libexpat # X-Plane/FlightGear aeronautical data XPLANE No Yes Yes # multi-layer # ("XPLANE", "r") ]) # Minimal GDAL version for different modes driver_mode_mingdal = { 'r': {'GPKG': (1, 11, 0), 'GeoJSONSeq': (2, 4, 0), 'FlatGeobuf': (3, 1, 0)}, 'w': {'GPKG': (1, 11, 0), 'PCIDSK': (2, 0, 0), 'GeoJSONSeq': (2, 4, 0), 'FlatGeobuf': (3, 1, 3)}, 'a': {'GPKG': (1, 11, 0), 'PCIDSK': (2, 0, 0), 'GeoJSON': (2, 1, 0), 'MapInfo File': (2, 0, 0)} } def _driver_supports_mode(driver, mode): """ Returns True if driver supports mode, False otherwise Note: this function is not part of Fiona's public API. """ if driver not in supported_drivers: return False if mode not in supported_drivers[driver]: return False if driver in driver_mode_mingdal[mode]: if get_gdal_version_num() < calc_gdal_version_num(*driver_mode_mingdal[mode][driver]): return False return True # Removes drivers from the supported_drivers dictionary that the # machine's installation of OGR does not support, due to how it was compiled. # OGR may not have optional libraries compiled or installed. def _filter_supported_drivers(): global supported_drivers with Env() as gdalenv: ogrdrv_names = gdalenv.drivers().keys() supported_drivers_copy = supported_drivers.copy() for drv in supported_drivers.keys(): if drv not in ogrdrv_names: del supported_drivers_copy[drv] supported_drivers = supported_drivers_copy _filter_supported_drivers() # _driver_converts_to_str contains field type, driver combinations that are silently converted to string # None: field type is always converted to str # (2, 0, 0): starting from GDAL 2.0 field type is not converted to string _driver_converts_to_str = { 'time': { 'CSV': None, 'PCIDSK': None, 'GeoJSON': (2, 0, 0), 'GPKG': None, 'GMT': None, 'OGR_GMT': None }, 'datetime': { 'CSV': None, 'PCIDSK': None, 'GeoJSON': (2, 0, 0), 'GML': (3, 1, 0), }, 'date': { 'CSV': None, 'PCIDSK': None, 'GeoJSON': (2, 0, 0), 'GMT': None, 'OGR_GMT': None, 'GML': (3, 1, 0), } } def _driver_converts_field_type_silently_to_str(driver, field_type): """ Returns True if the driver converts the field_type silently to str, False otherwise Note: this function is not part of Fiona's public API.
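    For example (values drawn from the _driver_converts_to_str table
    above): ("GeoJSON", "datetime") returns True under GDAL 1.x, where
    the driver writes datetimes as strings, and False under GDAL >= 2.0.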
""" if field_type in _driver_converts_to_str and driver in _driver_converts_to_str[field_type]: if _driver_converts_to_str[field_type][driver] is None: return True elif get_gdal_version_num() < calc_gdal_version_num(*_driver_converts_to_str[field_type][driver]): return True return False # None: field type is never supported, (2, 0, 0) field type is supported starting with gdal 2.0 _driver_field_type_unsupported = { 'time': { 'ESRI Shapefile': None, 'GPKG': (2, 0, 0), 'GPX': None, 'GPSTrackMaker': None, 'GML': (3, 1, 0), 'DGN': None, 'BNA': None, 'DXF': None, 'PCIDSK': (2, 1, 0), 'FileGDB': None, 'FlatGeobuf': None }, 'datetime': { 'ESRI Shapefile': None, 'GPKG': (2, 0, 0), 'DGN': None, 'BNA': None, 'DXF': None, 'PCIDSK': (2, 1, 0) }, 'date': { 'GPX': None, 'GPSTrackMaker': None, 'DGN': None, 'BNA': None, 'DXF': None, 'PCIDSK': (2, 1, 0), 'FileGDB': None, 'FlatGeobuf': None } } def _driver_supports_field(driver, field_type): """ Returns True if the driver supports the field_type, False otherwise Note: this function is not part of Fiona's public API. """ if field_type in _driver_field_type_unsupported and driver in _driver_field_type_unsupported[field_type]: if _driver_field_type_unsupported[field_type][driver] is None: return False elif get_gdal_version_num() < calc_gdal_version_num(*_driver_field_type_unsupported[field_type][driver]): return False return True # None: field type never supports timezones, (2, 0, 0): field type supports timezones with GDAL 2.0.0 _drivers_not_supporting_timezones = { 'datetime': { 'MapInfo File': None, 'GPKG': (3, 1, 0), 'GPSTrackMaker': (3, 1, 1), 'FileGDB': None, 'SQLite': (2, 4, 0) }, 'time': { 'MapInfo File': None, 'GPKG': None, 'GPSTrackMaker': None, 'GeoJSON': None, 'GeoJSONSeq': None, 'GML': None, 'CSV': None, 'GMT': None, 'OGR_GMT': None, 'SQLite': None } } def _driver_supports_timezones(driver, field_type): """ Returns True if the driver supports timezones for field_type, False otherwise Note: this function is not part of Fiona's public API. """ if field_type in _drivers_not_supporting_timezones and driver in _drivers_not_supporting_timezones[field_type]: if _drivers_not_supporting_timezones[field_type][driver] is None: return False elif get_gdal_version_num() < calc_gdal_version_num(*_drivers_not_supporting_timezones[field_type][driver]): return False return True # None: driver never supports timezones, (2, 0, 0): driver supports timezones with GDAL 2.0.0 _drivers_not_supporting_milliseconds = { 'GPSTrackMaker': None, 'FileGDB': None } def _driver_supports_milliseconds(driver): """ Returns True if the driver supports milliseconds, False otherwise Note: this function is not part of Fiona's public API. 
""" # GDAL 2.0 introduced support for milliseconds if get_gdal_version_num() < calc_gdal_version_num(2, 0, 0): return False if driver in _drivers_not_supporting_milliseconds: if _drivers_not_supporting_milliseconds[driver] is None: return False elif calc_gdal_version_num(*_drivers_not_supporting_milliseconds[driver]) < get_gdal_version_num(): return False return True Fiona-1.8.21/fiona/env.py000066400000000000000000000465121420023252700151020ustar00rootroot00000000000000"""Fiona's GDAL/AWS environment""" from functools import wraps, total_ordering import logging import os import re import threading import attr from six import string_types import fiona._loading with fiona._loading.add_gdal_dll_directories(): from fiona._env import ( GDALDataFinder, GDALEnv, PROJDataFinder, calc_gdal_version_num, get_gdal_config, get_gdal_release_name, get_gdal_version_num, set_gdal_config, set_proj_data_search_path, ) from fiona.compat import getargspec from fiona.errors import EnvError, GDALVersionError from fiona.session import Session, DummySession class ThreadEnv(threading.local): def __init__(self): self._env = None # Initialises in each thread # When the outermost 'fiona.Env()' executes '__enter__' it # probes the GDAL environment to see if any of the supplied # config options already exist, the assumption being that they # were set with 'osgeo.gdal.SetConfigOption()' or possibly # 'fiona.env.set_gdal_config()'. The discovered options are # reinstated when the outermost Fiona environment exits. # Without this check any environment options that are present in # the GDAL environment and are also passed to 'fiona.Env()' # will be unset when 'fiona.Env()' tears down, regardless of # their value. For example: # # from osgeo import gdal import fiona # # gdal.SetConfigOption('key', 'value') # with fiona.Env(key='something'): # pass # # The config option 'key' would be unset when 'Env()' exits. # A more comprehensive solution would also leverage # https://trac.osgeo.org/gdal/changeset/37273 but this gets # Fiona + older versions of GDAL halfway there. One major # assumption is that environment variables are not set directly # with 'osgeo.gdal.SetConfigOption()' OR # 'fiona.env.set_gdal_config()' inside of a 'fiona.Env()'. self._discovered_options = None local = ThreadEnv() log = logging.getLogger(__name__) class Env(object): """Abstraction for GDAL and AWS configuration The GDAL library is stateful: it has a registry of format drivers, an error stack, and dozens of configuration options. Fiona's approach to working with GDAL is to wrap all the state up using a Python context manager (see PEP 343, https://www.python.org/dev/peps/pep-0343/). When the context is entered GDAL drivers are registered, error handlers are configured, and configuration options are set. When the context is exited, drivers are removed from the registry and other configurations are removed. Example: with fiona.Env(GDAL_CACHEMAX=512) as env: # All drivers are registered, GDAL's raster block cache # size is set to 512MB. # Commence processing... ... # End of processing. # At this point, configuration options are set to their # previous (possible unset) values. A boto3 session or boto3 session constructor arguments `aws_access_key_id`, `aws_secret_access_key`, `aws_session_token` may be passed to Env's constructor. In the latter case, a session will be created as soon as needed. AWS credentials are configured for GDAL as needed. 
""" @classmethod def default_options(cls): """Default configuration options Parameters ---------- None Returns ------- dict """ return { 'CHECK_WITH_INVERT_PROJ': True, 'GTIFF_IMPLICIT_JPEG_OVR': False, "FIONA_ENV": True } def __init__( self, session=None, **options): """Create a new GDAL/AWS environment. Note: this class is a context manager. GDAL isn't configured until the context is entered via `with fiona.Env():` Parameters ---------- session : optional A Session object. **options : optional A mapping of GDAL configuration options, e.g., `CPL_DEBUG=True, CHECK_WITH_INVERT_PROJ=False`. Returns ------- Env Notes ----- We raise EnvError if the GDAL config options AWS_ACCESS_KEY_ID or AWS_SECRET_ACCESS_KEY are given. AWS credentials are handled exclusively by boto3. Examples -------- >>> with Env(CPL_DEBUG=True, CPL_CURL_VERBOSE=True): ... with fiona.open("zip+https://example.com/a.zip") as col: ... print(col.meta) For access to secured cloud resources, a Fiona Session may be passed to the constructor. >>> import boto3 >>> from fiona.session import AWSSession >>> boto3_session = boto3.Session(...) >>> with Env(AWSSession(boto3_session)): ... with fiona.open("zip+s3://example/a.zip") as col: ... print(col.meta) """ if ('AWS_ACCESS_KEY_ID' in options or 'AWS_SECRET_ACCESS_KEY' in options): raise EnvError( "GDAL's AWS config options can not be directly set. " "AWS credentials are handled exclusively by boto3.") if session: self.session = session else: self.session = DummySession() self.options = options.copy() self.context_options = {} @classmethod def from_defaults(cls, session=None, **options): """Create an environment with default config options Parameters ---------- session : optional A Session object. **options : optional A mapping of GDAL configuration options, e.g., `CPL_DEBUG=True, CHECK_WITH_INVERT_PROJ=False`. Returns ------- Env Notes ----- The items in kwargs will be overlaid on the default values. """ opts = Env.default_options() opts.update(**options) return Env(session=session, **opts) @property def is_credentialized(self): """Test for existence of cloud credentials Returns ------- bool """ return hascreds() def credentialize(self): """Get credentials and configure GDAL Note well: this method is a no-op if the GDAL environment already has credentials, unless session is not None. Returns ------- None """ if hascreds(): pass else: cred_opts = self.session.get_credential_options() self.options.update(**cred_opts) setenv(**cred_opts) def drivers(self): """Return a mapping of registered drivers.""" return local._env.drivers() def __enter__(self): log.debug("Entering env context: %r", self) if local._env is None: log.debug("Starting outermost env") self._has_parent_env = False # See note directly above where _discovered_options is globally # defined. This MUST happen before calling 'defenv()'. local._discovered_options = {} # Don't want to reinstate the "FIONA_ENV" option. 
probe_env = {k for k in self.options.keys() if k != "FIONA_ENV"} for key in probe_env: val = get_gdal_config(key, normalize=False) if val is not None: local._discovered_options[key] = val log.debug("Discovered option: %s=%s", key, val) defenv(**self.options) self.context_options = {} else: self._has_parent_env = True self.context_options = getenv() setenv(**self.options) self.credentialize() log.debug("Entered env context: %r", self) return self def __exit__(self, exc_type=None, exc_val=None, exc_tb=None): log.debug("Exiting env context: %r", self) delenv() if self._has_parent_env: defenv() setenv(**self.context_options) else: log.debug("Exiting outermost env") # See note directly above where _discovered_options is globally # defined. while local._discovered_options: key, val = local._discovered_options.popitem() set_gdal_config(key, val, normalize=False) log.debug( "Set discovered option back to: '%s=%s", key, val) local._discovered_options = None log.debug("Exited env context: %r", self) def defenv(**options): """Create a default environment if necessary.""" if local._env: log.debug("GDAL environment exists: %r", local._env) else: log.debug("No GDAL environment exists") local._env = GDALEnv() local._env.update_config_options(**options) log.debug( "New GDAL environment %r created", local._env) local._env.start() def getenv(): """Get a mapping of current options.""" if not local._env: raise EnvError("No GDAL environment exists") else: log.debug("Got a copy of environment %r options", local._env) return local._env.options.copy() def hasenv(): return bool(local._env) def setenv(**options): """Set options in the existing environment.""" if not local._env: raise EnvError("No GDAL environment exists") else: local._env.update_config_options(**options) log.debug("Updated existing %r with options %r", local._env, options) def hascreds(): return local._env is not None and all(key in local._env.get_config_options() for key in ['AWS_ACCESS_KEY_ID', 'AWS_SECRET_ACCESS_KEY']) def delenv(): """Delete options in the existing environment.""" if not local._env: raise EnvError("No GDAL environment exists") else: local._env.clear_config_options() log.debug("Cleared existing %r options", local._env) local._env.stop() local._env = None class NullContextManager(object): def __init__(self): pass def __enter__(self): return self def __exit__(self, *args): pass def env_ctx_if_needed(): """Return an Env if one does not exist Returns ------- Env or a do-nothing context manager """ if local._env: return NullContextManager() else: return Env.from_defaults() def ensure_env(f): """A decorator that ensures an env exists before a function calls any GDAL C functions. Parameters ---------- f : function A function. Returns ------- A function wrapper. Notes ----- If there is already an existing environment, the wrapper does nothing and immediately calls f with the given arguments. """ @wraps(f) def wrapper(*args, **kwargs): if local._env: return f(*args, **kwargs) else: with Env.from_defaults(): return f(*args, **kwargs) return wrapper def ensure_env_with_credentials(f): """Ensures a config environment exists and has credentials. Parameters ---------- f : function A function. Returns ------- A function wrapper. Notes ----- The function wrapper checks the first argument of f and credentializes the environment if the first argument is a URI with scheme "s3". If there is already an existing environment, the wrapper does nothing and immediately calls f with the given arguments. 
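    Example (an illustrative sketch; the function name is hypothetical):

        @ensure_env_with_credentials
        def count_features(path):
            with fiona.open(path) as src:
                return len(src)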
""" @wraps(f) def wrapper(*args, **kwargs): if local._env: return f(*args, **kwargs) else: if isinstance(args[0], str): session = Session.from_path(args[0]) else: session = Session.from_path(None) with Env.from_defaults(session=session): log.debug("Credentialized: {!r}".format(getenv())) return f(*args, **kwargs) return wrapper @attr.s(slots=True) @total_ordering class GDALVersion(object): """Convenience class for obtaining GDAL major and minor version components and comparing between versions. This is highly simplistic and assumes a very normal numbering scheme for versions and ignores everything except the major and minor components. """ major = attr.ib(default=0, validator=attr.validators.instance_of(int)) minor = attr.ib(default=0, validator=attr.validators.instance_of(int)) def __eq__(self, other): return (self.major, self.minor) == tuple(other.major, other.minor) def __lt__(self, other): return (self.major, self.minor) < tuple(other.major, other.minor) def __repr__(self): return "GDALVersion(major={0}, minor={1})".format(self.major, self.minor) def __str__(self): return "{0}.{1}".format(self.major, self.minor) @classmethod def parse(cls, input): """ Parses input tuple or string to GDALVersion. If input is a GDALVersion instance, it is returned. Parameters ---------- input: tuple of (major, minor), string, or instance of GDALVersion Returns ------- GDALVersion instance """ if isinstance(input, cls): return input if isinstance(input, tuple): return cls(*input) elif isinstance(input, string_types): # Extract major and minor version components. # alpha, beta, rc suffixes ignored match = re.search(r'^\d+\.\d+', input) if not match: raise ValueError( "value does not appear to be a valid GDAL version " "number: {}".format(input)) major, minor = (int(c) for c in match.group().split('.')) return cls(major=major, minor=minor) raise TypeError("GDALVersion can only be parsed from a string or tuple") @classmethod def runtime(cls): """Return GDALVersion of current GDAL runtime""" return cls.parse(get_gdal_release_name()) def at_least(self, other): other = self.__class__.parse(other) return self >= other def require_gdal_version(version, param=None, values=None, is_max_version=False, reason=''): """A decorator that ensures the called function or parameters are supported by the runtime version of GDAL. Raises GDALVersionError if conditions are not met. Examples: \b @require_gdal_version('2.2') def some_func(): calling `some_func` with a runtime version of GDAL that is < 2.2 raises a GDALVersionErorr. \b @require_gdal_version('2.2', param='foo') def some_func(foo='bar'): calling `some_func` with parameter `foo` of any value on GDAL < 2.2 raises a GDALVersionError. \b @require_gdal_version('2.2', param='foo', values=('bar',)) def some_func(foo=None): calling `some_func` with parameter `foo` and value `bar` on GDAL < 2.2 raises a GDALVersionError. Parameters ------------ version: tuple, string, or GDALVersion param: string (optional, default: None) If `values` are absent, then all use of this parameter with a value other than default value requires at least GDAL `version`. values: tuple, list, or set (optional, default: None) contains values that require at least GDAL `version`. `param` is required for `values`. is_max_version: bool (optional, default: False) if `True` indicates that the version provided is the maximum version allowed, instead of requiring at least that version. reason: string (optional: default: '') custom error message presented to user in addition to message about GDAL version. 
Use this to provide an explanation of what changed if necessary context to the user. Returns --------- wrapped function """ if values is not None: if param is None: raise ValueError( 'require_gdal_version: param must be provided with values') if not isinstance(values, (tuple, list, set)): raise ValueError( 'require_gdal_version: values must be a tuple, list, or set') version = GDALVersion.parse(version) runtime = GDALVersion.runtime() inequality = '>=' if runtime < version else '<=' reason = '\n{0}'.format(reason) if reason else reason def decorator(f): @wraps(f) def wrapper(*args, **kwds): if ((runtime < version and not is_max_version) or (is_max_version and runtime > version)): if param is None: raise GDALVersionError( "GDAL version must be {0} {1}{2}".format( inequality, str(version), reason)) # normalize args and kwds to dict argspec = getargspec(f) full_kwds = kwds.copy() if argspec.args: full_kwds.update(dict(zip(argspec.args[:len(args)], args))) if argspec.defaults: defaults = dict(zip( reversed(argspec.args), reversed(argspec.defaults))) else: defaults = {} if param in full_kwds: if values is None: if param not in defaults or ( full_kwds[param] != defaults[param]): raise GDALVersionError( 'usage of parameter "{0}" requires ' 'GDAL {1} {2}{3}'.format(param, inequality, version, reason)) elif full_kwds[param] in values: raise GDALVersionError( 'parameter "{0}={1}" requires ' 'GDAL {2} {3}{4}'.format( param, full_kwds[param], inequality, version, reason)) return f(*args, **kwds) return wrapper return decorator # Patch the environment if needed, such as in the installed wheel case. if 'GDAL_DATA' not in os.environ: path = GDALDataFinder().search_wheel() if path: log.debug("GDAL data found in package: path=%r.", path) set_gdal_config("GDAL_DATA", path) # See https://github.com/mapbox/rasterio/issues/1631. elif GDALDataFinder().find_file("header.dxf"): log.debug("GDAL data files are available at built-in paths.") else: path = GDALDataFinder().search() if path: set_gdal_config("GDAL_DATA", path) log.debug("GDAL data found in other locations: path=%r.", path) if "PROJ_LIB" in os.environ: path = os.environ["PROJ_LIB"] set_proj_data_search_path(path) elif PROJDataFinder().search_wheel(): path = PROJDataFinder().search_wheel() log.debug("PROJ data found in package: path=%r.", path) set_proj_data_search_path(path) # See https://github.com/mapbox/rasterio/issues/1631. elif PROJDataFinder().has_data(): log.debug("PROJ data files are available at built-in paths.") else: path = PROJDataFinder().search() if path: log.debug("PROJ data found in other locations: path=%r.", path) set_proj_data_search_path(path) Fiona-1.8.21/fiona/errors.py000066400000000000000000000030531420023252700156170ustar00rootroot00000000000000# Errors. 
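# DriverError, SchemaError, and CRSError below all derive from
# FionaValueError, so callers can catch narrowly or broadly. An
# illustrative sketch (the path is hypothetical):
#
#     try:
#         with fiona.open("data.xyz") as src:
#             pass
#     except DriverError:
#         ...  # also caught by `except FionaValueError`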
class FionaError(Exception): """Base Fiona error""" class FionaValueError(ValueError): """Fiona-specific value errors""" class DriverError(FionaValueError): """Encapsulates unsupported driver and driver mode errors.""" class SchemaError(FionaValueError): """When a schema mapping has no properties or no geometry.""" class CRSError(FionaValueError): """When a crs mapping has neither init or proj items.""" class DataIOError(IOError): """IO errors involving driver registration or availability.""" class DriverIOError(IOError): """A format specific driver error.""" class DriverSupportError(DriverIOError): """Driver does not support schema""" class DatasetDeleteError(IOError): """Failure to delete a dataset""" class FieldNameEncodeError(UnicodeEncodeError): """Failure to encode a field name.""" class UnsupportedGeometryTypeError(KeyError): """When a OGR geometry type isn't supported by Fiona.""" class GeometryTypeValidationError(FionaValueError): """Tried to write a geometry type not specified in the schema""" class TransactionError(RuntimeError): """Failure relating to GDAL transactions""" class EnvError(FionaError): """Environment Errors""" class GDALVersionError(FionaError): """Raised if the runtime version of GDAL does not meet the required version of GDAL. """ class FionaDeprecationWarning(UserWarning): """A warning about deprecation of Fiona features""" class FeatureWarning(UserWarning): """A warning about serialization of a feature""" Fiona-1.8.21/fiona/fio/000077500000000000000000000000001420023252700145055ustar00rootroot00000000000000Fiona-1.8.21/fiona/fio/__init__.py000066400000000000000000000007761420023252700166300ustar00rootroot00000000000000"""Fiona's command line interface""" from functools import wraps def with_context_env(f): """Pops the Fiona Env from the passed context and executes the wrapped func in the context of that obj. Click's pass_context decorator must precede this decorator, or else there will be no context in the wrapper args. """ @wraps(f) def wrapper(*args, **kwds): ctx = args[0] env = ctx.obj.pop('env') with env: return f(*args, **kwds) return wrapper Fiona-1.8.21/fiona/fio/bounds.py000066400000000000000000000062461420023252700163610ustar00rootroot00000000000000"""$ fio bounds""" import json import logging import click from cligj import precision_opt, use_rs_opt import fiona from fiona.fio.helpers import obj_gen from fiona.fio import with_context_env @click.command(short_help="Print the extent of GeoJSON objects") @precision_opt @click.option('--explode/--no-explode', default=False, help="Explode collections into features (default: no).") @click.option('--with-id/--without-id', default=False, help="Print GeoJSON ids and bounding boxes together " "(default: without).") @click.option('--with-obj/--without-obj', default=False, help="Print GeoJSON objects and bounding boxes together " "(default: without).") @use_rs_opt @click.pass_context @with_context_env def bounds(ctx, precision, explode, with_id, with_obj, use_rs): """Print the bounding boxes of GeoJSON objects read from stdin. Optionally explode collections and print the bounds of their features. To print identifiers for input objects along with their bounds as a {id: identifier, bbox: bounds} JSON object, use --with-id. To print the input objects themselves along with their bounds as GeoJSON object, use --with-obj. This has the effect of updating input objects with {id: identifier, bbox: bounds}. 
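    Example (an illustrative pipeline):

    \b
        $ fio cat data.geojson | fio bounds --explode --with-id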
""" logger = logging.getLogger(__name__) stdin = click.get_text_stream('stdin') try: source = obj_gen(stdin) for i, obj in enumerate(source): obj_id = obj.get('id', 'collection:' + str(i)) xs = [] ys = [] features = obj.get('features') or [obj] for j, feat in enumerate(features): feat_id = feat.get('id', 'feature:' + str(i)) w, s, e, n = fiona.bounds(feat) if precision > 0: w, s, e, n = (round(v, precision) for v in (w, s, e, n)) if explode: if with_id: rec = { 'parent': obj_id, 'id': feat_id, 'bbox': (w, s, e, n)} elif with_obj: feat.update(parent=obj_id, bbox=(w, s, e, n)) rec = feat else: rec = (w, s, e, n) if use_rs: click.echo(u'\u001e', nl=False) click.echo(json.dumps(rec)) else: xs.extend([w, e]) ys.extend([s, n]) if not explode: w, s, e, n = (min(xs), min(ys), max(xs), max(ys)) if with_id: rec = {'id': obj_id, 'bbox': (w, s, e, n)} elif with_obj: obj.update(id=obj_id, bbox=(w, s, e, n)) rec = obj else: rec = (w, s, e, n) if use_rs: click.echo(u'\u001e', nl=False) click.echo(json.dumps(rec)) except Exception: logger.exception("Exception caught during processing") raise click.Abort() Fiona-1.8.21/fiona/fio/calc.py000066400000000000000000000043101420023252700157570ustar00rootroot00000000000000from __future__ import division import json import logging import click from cligj import use_rs_opt from .helpers import obj_gen, eval_feature_expression from fiona.fio import with_context_env @click.command(short_help="Calculate GeoJSON property by Python expression") @click.argument('property_name') @click.argument('expression') @click.option('--overwrite', is_flag=True, default=False, help="Overwrite properties, default: False") @use_rs_opt @click.pass_context @with_context_env def calc(ctx, property_name, expression, overwrite, use_rs): """ Create a new property on GeoJSON features using the specified expression. \b The expression is evaluated in a restricted namespace containing: - sum, pow, min, max and the imported math module - shape (optional, imported from shapely.geometry if available) - bool, int, str, len, float type conversions - f (the feature to be evaluated, allows item access via javascript-style dot notation using munch) The expression will be evaluated for each feature and its return value will be added to the properties as the specified property_name. Existing properties will not be overwritten by default (an Exception is raised). 
Example \b $ fio cat data.shp | fio calc sumAB "f.properties.A + f.properties.B" """ logger = logging.getLogger(__name__) stdin = click.get_text_stream('stdin') try: source = obj_gen(stdin) for i, obj in enumerate(source): features = obj.get('features') or [obj] for j, feat in enumerate(features): if not overwrite and property_name in feat['properties']: raise click.UsageError( '{0} already exists in properties; ' 'rename or use --overwrite'.format(property_name)) feat['properties'][property_name] = eval_feature_expression( feat, expression) if use_rs: click.echo(u'\u001e', nl=False) click.echo(json.dumps(feat)) except Exception: logger.exception("Exception caught during processing") raise click.Abort() Fiona-1.8.21/fiona/fio/cat.py000066400000000000000000000062771420023252700156420ustar00rootroot00000000000000"""$ fio cat""" import json import logging import warnings import click import cligj import fiona from fiona.transform import transform_geom from fiona.fio import options, with_context_env warnings.simplefilter('default') # Cat command @click.command(short_help="Concatenate and print the features of datasets") @click.argument('files', nargs=-1, required=True, metavar="INPUTS...") @click.option('--layer', default=None, multiple=True, callback=options.cb_multilayer, help="Input layer(s), specified as 'fileindex:layer` " "For example, '1:foo,2:bar' will concatenate layer foo " "from file 1 and layer bar from file 2") @cligj.precision_opt @cligj.indent_opt @cligj.compact_opt @click.option('--ignore-errors/--no-ignore-errors', default=False, help="log errors but do not stop serialization.") @options.dst_crs_opt @cligj.use_rs_opt @click.option( "--bbox", default=None, metavar="w,s,e,n", help="filter for features intersecting a bounding box", ) @click.option( "--cut-at-antimeridian", is_flag=True, default=False, help="Optionally cut geometries at the anti-meridian. To be used only for a geographic destination CRS.", ) @click.pass_context @with_context_env def cat( ctx, files, precision, indent, compact, ignore_errors, dst_crs, use_rs, bbox, cut_at_antimeridian, layer, ): """ Concatenate and print the features of input datasets as a sequence of GeoJSON features. When working with a multi-layer dataset the first layer is used by default. Use the '--layer' option to select a different layer. 
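    For example, concatenating two hypothetical datasets while keeping
    only the features that intersect a bounding box:

    \b
    $ fio cat data1.shp data2.shp --bbox -122.6,45.5,-122.3,45.7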
""" logger = logging.getLogger(__name__) dump_kwds = {'sort_keys': True} if indent: dump_kwds['indent'] = indent if compact: dump_kwds['separators'] = (',', ':') # Validate file idexes provided in --layer option # (can't pass the files to option callback) if layer: options.validate_multilayer_file_index(files, layer) # first layer is the default for i in range(1, len(files) + 1): if str(i) not in layer.keys(): layer[str(i)] = [0] try: if bbox: try: bbox = tuple(map(float, bbox.split(','))) except ValueError: bbox = json.loads(bbox) for i, path in enumerate(files, 1): for lyr in layer[str(i)]: with fiona.open(path, layer=lyr) as src: for i, feat in src.items(bbox=bbox): if dst_crs or precision >= 0: g = transform_geom( src.crs, dst_crs, feat['geometry'], antimeridian_cutting=cut_at_antimeridian, precision=precision) feat['geometry'] = g feat['bbox'] = fiona.bounds(g) if use_rs: click.echo(u'\u001e', nl=False) click.echo(json.dumps(feat, **dump_kwds)) except Exception: logger.exception("Exception caught during processing") raise click.Abort() Fiona-1.8.21/fiona/fio/collect.py000066400000000000000000000173241420023252700165130ustar00rootroot00000000000000"""$ fio collect""" from functools import partial, wraps import json import logging import click import cligj from fiona.fio import helpers, options, with_context_env from fiona.transform import transform_geom @click.command(short_help="Collect a sequence of features.") @cligj.precision_opt @cligj.indent_opt @cligj.compact_opt @click.option('--record-buffered/--no-record-buffered', default=False, help="Economical buffering of writes at record, not collection " "(default), level.") @click.option('--ignore-errors/--no-ignore-errors', default=False, help="log errors but do not stop serialization.") @options.src_crs_opt @click.option('--with-ld-context/--without-ld-context', default=False, help="add a JSON-LD context to JSON output.") @click.option('--add-ld-context-item', multiple=True, help="map a term to a URI and add it to the output's JSON LD " "context.") @click.option('--parse/--no-parse', default=True, help="load and dump the geojson feature (default is True)") @click.pass_context @with_context_env def collect(ctx, precision, indent, compact, record_buffered, ignore_errors, src_crs, with_ld_context, add_ld_context_item, parse): """Make a GeoJSON feature collection from a sequence of GeoJSON features and print it.""" logger = logging.getLogger(__name__) stdin = click.get_text_stream('stdin') sink = click.get_text_stream('stdout') dump_kwds = {'sort_keys': True} if indent: dump_kwds['indent'] = indent if compact: dump_kwds['separators'] = (',', ':') item_sep = compact and ',' or ', ' if src_crs: if not parse: raise click.UsageError("Can't specify --src-crs with --no-parse") transformer = partial(transform_geom, src_crs, 'EPSG:4326', antimeridian_cutting=True, precision=precision) else: def transformer(x): return x first_line = next(stdin) # If parsing geojson if parse: # If input is RS-delimited JSON sequence. 
if first_line.startswith(u'\x1e'): def feature_text_gen(): buffer = first_line.strip(u'\x1e') for line in stdin: if line.startswith(u'\x1e'): if buffer: feat = json.loads(buffer) feat['geometry'] = transformer(feat['geometry']) yield json.dumps(feat, **dump_kwds) buffer = line.strip(u'\x1e') else: buffer += line else: feat = json.loads(buffer) feat['geometry'] = transformer(feat['geometry']) yield json.dumps(feat, **dump_kwds) else: def feature_text_gen(): feat = json.loads(first_line) feat['geometry'] = transformer(feat['geometry']) yield json.dumps(feat, **dump_kwds) for line in stdin: feat = json.loads(line) feat['geometry'] = transformer(feat['geometry']) yield json.dumps(feat, **dump_kwds) # If *not* parsing geojson else: # If input is RS-delimited JSON sequence. if first_line.startswith(u'\x1e'): def feature_text_gen(): buffer = first_line.strip(u'\x1e') for line in stdin: if line.startswith(u'\x1e'): if buffer: yield buffer buffer = line.strip(u'\x1e') else: buffer += line else: yield buffer else: def feature_text_gen(): yield first_line for line in stdin: yield line try: source = feature_text_gen() if record_buffered: # Buffer GeoJSON data at the feature level for smaller # memory footprint. indented = bool(indent) rec_indent = "\n" + " " * (2 * (indent or 0)) collection = { 'type': 'FeatureCollection', 'features': []} if with_ld_context: collection['@context'] = helpers.make_ld_context( add_ld_context_item) head, tail = json.dumps(collection, **dump_kwds).split('[]') sink.write(head) sink.write("[") # Try the first record. try: i, first = 0, next(source) if with_ld_context: first = helpers.id_record(first) if indented: sink.write(rec_indent) sink.write(first.replace("\n", rec_indent)) except StopIteration: pass except Exception as exc: # Ignoring errors is *not* the default. if ignore_errors: logger.error( "failed to serialize file record %d (%s), " "continuing", i, exc) else: # Log error and close up the GeoJSON, leaving it # more or less valid no matter what happens above. logger.critical( "failed to serialize file record %d (%s), " "quitting", i, exc) sink.write("]") sink.write(tail) if indented: sink.write("\n") raise # Because trailing commas aren't valid in JSON arrays # we'll write the item separator before each of the # remaining features. for i, rec in enumerate(source, 1): try: if with_ld_context: rec = helpers.id_record(rec) if indented: sink.write(rec_indent) sink.write(item_sep) sink.write(rec.replace("\n", rec_indent)) except Exception as exc: if ignore_errors: logger.error( "failed to serialize file record %d (%s), " "continuing", i, exc) else: logger.critical( "failed to serialize file record %d (%s), " "quitting", i, exc) sink.write("]") sink.write(tail) if indented: sink.write("\n") raise # Close up the GeoJSON after writing all features. sink.write("]") sink.write(tail) if indented: sink.write("\n") else: # Buffer GeoJSON data at the collection level. The default.
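            # The whole sequence is joined in memory and emitted as one
            # document, e.g. {"features": [...], "type": "FeatureCollection"}.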
collection = { 'type': 'FeatureCollection', 'features': []} if with_ld_context: collection['@context'] = helpers.make_ld_context( add_ld_context_item) head, tail = json.dumps(collection, **dump_kwds).split('[]') sink.write(head) sink.write("[") sink.write(",".join(source)) sink.write("]") sink.write(tail) sink.write("\n") except Exception: logger.exception("Exception caught during processing") raise click.Abort()
Fiona-1.8.21/fiona/fio/distrib.py
"""$ fio distrib""" import json import logging import click import cligj from fiona.fio import helpers, with_context_env @click.command() @cligj.use_rs_opt @click.pass_context @with_context_env def distrib(ctx, use_rs): """Distribute features from a collection. Print the features of GeoJSON objects read from stdin. """ logger = logging.getLogger(__name__) stdin = click.get_text_stream('stdin') try: source = helpers.obj_gen(stdin) for i, obj in enumerate(source): obj_id = obj.get('id', 'collection:' + str(i)) features = obj.get('features') or [obj] for j, feat in enumerate(features): if obj.get('type') == 'FeatureCollection': feat['parent'] = obj_id feat_id = feat.get('id', 'feature:' + str(j)) feat['id'] = feat_id if use_rs: click.echo(u'\u001e', nl=False) click.echo(json.dumps(feat)) except Exception: logger.exception("Exception caught during processing") raise click.Abort()
Fiona-1.8.21/fiona/fio/dump.py
"""$ fio dump""" from functools import partial import json import logging import click import cligj import fiona from fiona.fio import helpers, options, with_context_env from fiona.transform import transform_geom @click.command(short_help="Dump a dataset to GeoJSON.") @click.argument('input', required=True) @click.option('--layer', metavar="INDEX|NAME", callback=options.cb_layer, help="Print information about a specific layer. The first " "layer is used by default. Layers use zero-based " "numbering when accessed by index.") @click.option('--encoding', help="Specify encoding of the input file.") @cligj.precision_opt @cligj.indent_opt @cligj.compact_opt @click.option('--record-buffered/--no-record-buffered', default=False, help="Economical buffering of writes at record, not collection " "(default), level.") @click.option('--ignore-errors/--no-ignore-errors', default=False, help="log errors but do not stop serialization.") @click.option('--with-ld-context/--without-ld-context', default=False, help="add a JSON-LD context to JSON output.") @click.option('--add-ld-context-item', multiple=True, help="map a term to a URI and add it to the output's JSON LD " "context.") @click.pass_context @with_context_env def dump(ctx, input, encoding, precision, indent, compact, record_buffered, ignore_errors, with_ld_context, add_ld_context_item, layer): """Dump a dataset either as a GeoJSON feature collection (the default) or a sequence of GeoJSON features.""" logger = logging.getLogger(__name__) sink = click.get_text_stream('stdout') dump_kwds = {'sort_keys': True} if indent: dump_kwds['indent'] = indent if compact: dump_kwds['separators'] = (',', ':') item_sep = compact and ',' or ', ' open_kwds = {} if encoding: open_kwds['encoding'] = encoding if layer: open_kwds['layer'] = layer def transformer(crs, feat): tg = partial(transform_geom, crs, 'EPSG:4326', antimeridian_cutting=True, precision=precision) feat['geometry'] = tg(feat['geometry']) return feat try: with fiona.open(input, **open_kwds) as source: meta = source.meta meta['fields'] = dict(source.schema['properties'].items()) if record_buffered: # Buffer GeoJSON data at the feature level for smaller # memory footprint. indented = bool(indent) rec_indent = "\n" + " " * (2 * (indent or 0)) collection = { 'type': 'FeatureCollection', 'fiona:schema': meta['schema'], 'fiona:crs': meta['crs'], 'features': []} if with_ld_context: collection['@context'] = helpers.make_ld_context( add_ld_context_item) head, tail = json.dumps( collection, **dump_kwds).split('[]') sink.write(head) sink.write("[") itr = iter(source) # Try the first record. try: i, first = 0, next(itr) first = transformer(source.crs, first) if with_ld_context: first = helpers.id_record(first) if indented: sink.write(rec_indent) sink.write(json.dumps( first, **dump_kwds).replace("\n", rec_indent)) except StopIteration: pass except Exception as exc: # Ignoring errors is *not* the default. if ignore_errors: logger.error( "failed to serialize file record %d (%s), " "continuing", i, exc) else: # Log error and close up the GeoJSON, leaving it # more or less valid no matter what happens above. logger.critical( "failed to serialize file record %d (%s), " "quitting", i, exc) sink.write("]") sink.write(tail) if indented: sink.write("\n") raise # Because trailing commas aren't valid in JSON arrays # we'll write the item separator before each of the # remaining features. for i, rec in enumerate(itr, 1): rec = transformer(source.crs, rec) try: if with_ld_context: rec = helpers.id_record(rec) if indented: sink.write(rec_indent) sink.write(item_sep) sink.write(json.dumps( rec, **dump_kwds).replace("\n", rec_indent)) except Exception as exc: if ignore_errors: logger.error( "failed to serialize file record %d (%s), " "continuing", i, exc) else: logger.critical( "failed to serialize file record %d (%s), " "quitting", i, exc) sink.write("]") sink.write(tail) if indented: sink.write("\n") raise # Close up the GeoJSON after writing all features.
sink.write("]") sink.write(tail) if indented: sink.write("\n") else: # Buffer GeoJSON data at the collection level. The default. collection = { 'type': 'FeatureCollection', 'fiona:schema': meta['schema'], 'fiona:crs': meta['crs']} if with_ld_context: collection['@context'] = helpers.make_ld_context( add_ld_context_item) collection['features'] = [ helpers.id_record(transformer(rec)) for rec in source] else: collection['features'] = [ transformer(source.crs, rec) for rec in source] json.dump(collection, sink, **dump_kwds) except Exception: logger.exception("Exception caught during processing") raise click.Abort() Fiona-1.8.21/fiona/fio/env.py000066400000000000000000000027701420023252700156550ustar00rootroot00000000000000"""$ fio env""" import json import os import click import fiona import fiona._loading with fiona._loading.add_gdal_dll_directories(): from fiona._env import GDALDataFinder, PROJDataFinder @click.command(short_help="Print information about the fio environment.") @click.option('--formats', 'key', flag_value='formats', default=True, help="Enumerate the available formats.") @click.option('--credentials', 'key', flag_value='credentials', default=False, help="Print credentials.") @click.option('--gdal-data', 'key', flag_value='gdal_data', default=False, help="Print GDAL data path.") @click.option('--proj-data', 'key', flag_value='proj_data', default=False, help="Print PROJ data path.") @click.pass_context def env(ctx, key): """Print information about the Fiona environment: available formats, etc. """ stdout = click.get_text_stream('stdout') with ctx.obj['env'] as env: if key == 'formats': for k, v in sorted(fiona.supported_drivers.items()): modes = ', '.join("'" + m + "'" for m in v) stdout.write("%s (modes %s)\n" % (k, modes)) stdout.write('\n') elif key == 'credentials': click.echo(json.dumps(env.session.credentials)) elif key == 'gdal_data': click.echo(os.environ.get('GDAL_DATA') or GDALDataFinder().search()) elif key == 'proj_data': click.echo(os.environ.get('PROJ_LIB') or PROJDataFinder().search()) Fiona-1.8.21/fiona/fio/filter.py000066400000000000000000000032531420023252700163470ustar00rootroot00000000000000"""$ fio filter""" import json import logging import click from cligj import use_rs_opt from fiona.fio.helpers import obj_gen, eval_feature_expression from fiona.fio import with_context_env @click.command() @click.argument('filter_expression') @use_rs_opt @click.pass_context @with_context_env def filter(ctx, filter_expression, use_rs): """ Filter GeoJSON features by python expression. Features are read from stdin. The expression is evaluated in a restricted namespace containing: - sum, pow, min, max and the imported math module - shape (optional, imported from shapely.geometry if available) - bool, int, str, len, float type conversions - f (the feature to be evaluated, allows item access via javascript-style dot notation using munch) The expression will be evaluated for each feature and, if true, the feature will be included in the output. 
For example: \b $ fio cat data.shp \\ | fio filter "f.properties.area > 1000.0" \\ | fio collect > large_polygons.geojson """ logger = logging.getLogger(__name__) stdin = click.get_text_stream('stdin') try: source = obj_gen(stdin) for i, obj in enumerate(source): features = obj.get('features') or [obj] for j, feat in enumerate(features): if not eval_feature_expression(feat, filter_expression): continue if use_rs: click.echo(u'\u001e', nl=False) click.echo(json.dumps(feat)) except Exception: logger.exception("Exception caught during processing") raise click.Abort() Fiona-1.8.21/fiona/fio/helpers.py000066400000000000000000000062031420023252700165220ustar00rootroot00000000000000""" Helper objects needed by multiple CLI commands. """ from functools import partial import json import math import warnings from munch import munchify warnings.simplefilter('default') def obj_gen(lines): """Return a generator of JSON objects loaded from ``lines``.""" first_line = next(lines) if first_line.startswith(u'\x1e'): def gen(): buffer = first_line.strip(u'\x1e') for line in lines: if line.startswith(u'\x1e'): if buffer: yield json.loads(buffer) buffer = line.strip(u'\x1e') else: buffer += line else: yield json.loads(buffer) else: def gen(): yield json.loads(first_line) for line in lines: yield json.loads(line) return gen() def nullable(val, cast): if val is None: return None else: return cast(val) def eval_feature_expression(feature, expression): safe_dict = {'f': munchify(feature)} safe_dict.update({ 'sum': sum, 'pow': pow, 'min': min, 'max': max, 'math': math, 'bool': bool, 'int': partial(nullable, int), 'str': partial(nullable, str), 'float': partial(nullable, float), 'len': partial(nullable, len), }) try: from shapely.geometry import shape safe_dict['shape'] = shape except ImportError: pass return eval(expression, {"__builtins__": None}, safe_dict) def make_ld_context(context_items): """Returns a JSON-LD Context object. See http://json-ld.org/spec/latest/json-ld.""" ctx = { "@context": { "geojson": "http://ld.geojson.org/vocab#", "Feature": "geojson:Feature", "FeatureCollection": "geojson:FeatureCollection", "GeometryCollection": "geojson:GeometryCollection", "LineString": "geojson:LineString", "MultiLineString": "geojson:MultiLineString", "MultiPoint": "geojson:MultiPoint", "MultiPolygon": "geojson:MultiPolygon", "Point": "geojson:Point", "Polygon": "geojson:Polygon", "bbox": { "@container": "@list", "@id": "geojson:bbox" }, "coordinates": "geojson:coordinates", "datetime": "http://www.w3.org/2006/time#inXSDDateTime", "description": "http://purl.org/dc/terms/description", "features": { "@container": "@set", "@id": "geojson:features" }, "geometry": "geojson:geometry", "id": "@id", "properties": "geojson:properties", "start": "http://www.w3.org/2006/time#hasBeginning", "stop": "http://www.w3.org/2006/time#hasEnding", "title": "http://purl.org/dc/terms/title", "type": "@type", "when": "geojson:when" } } for item in context_items or []: t, uri = item.split("=") ctx[t.strip()] = uri.strip() return ctx def id_record(rec): """Converts a record's id to a blank node id and returns the record.""" rec['id'] = '_:f%s' % rec['id'] return rec Fiona-1.8.21/fiona/fio/info.py000066400000000000000000000054471420023252700160240ustar00rootroot00000000000000"""$ fio info""" import logging import json import click from cligj import indent_opt import fiona import fiona.crs from fiona.errors import DriverError from fiona.fio import options, with_context_env @click.command() # One or more files. 
@click.argument('input', required=True) @click.option('--layer', metavar="INDEX|NAME", callback=options.cb_layer, help="Print information about a specific layer. The first " "layer is used by default. Layers use zero-based " "numbering when accessed by index.") @indent_opt # Options to pick out a single metadata item and print it as # a string. @click.option('--count', 'meta_member', flag_value='count', help="Print the count of features.") @click.option('-f', '--format', '--driver', 'meta_member', flag_value='driver', help="Print the format driver.") @click.option('--crs', 'meta_member', flag_value='crs', help="Print the CRS as a PROJ.4 string.") @click.option('--bounds', 'meta_member', flag_value='bounds', help="Print the boundary coordinates " "(left, bottom, right, top).") @click.option('--name', 'meta_member', flag_value='name', help="Print the datasource's name.") @click.pass_context @with_context_env def info(ctx, input, indent, meta_member, layer): """ Print information about a dataset. When working with a multi-layer dataset the first layer is used by default. Use the '--layer' option to select a different layer. """ logger = logging.getLogger(__name__) try: with fiona.open(input, layer=layer) as src: info = src.meta info.update(name=src.name) try: info.update(bounds=src.bounds) except DriverError: info.update(bounds=None) logger.debug("Setting 'bounds' to None - driver " "was not able to calculate bounds") try: info.update(count=len(src)) except TypeError: info.update(count=None) logger.debug("Setting 'count' to None/null - layer does " "not support counting") proj4 = fiona.crs.to_string(src.crs) if proj4.startswith('+init=epsg'): proj4 = proj4.split('=')[1].upper() info['crs'] = proj4 if meta_member: if isinstance(info[meta_member], (list, tuple)): click.echo(" ".join(map(str, info[meta_member]))) else: click.echo(info[meta_member]) else: click.echo(json.dumps(info, indent=indent)) except Exception: logger.exception("Exception caught during processing") raise click.Abort() Fiona-1.8.21/fiona/fio/insp.py000066400000000000000000000026331420023252700160340ustar00rootroot00000000000000"""$ fio insp""" import code import logging import sys import click import fiona from fiona.fio import with_context_env @click.command(short_help="Open a dataset and start an interpreter.") @click.argument('src_path', required=True) @click.option('--ipython', 'interpreter', flag_value='ipython', help="Use IPython as interpreter.") @click.pass_context @with_context_env def insp(ctx, src_path, interpreter): """Open a collection within an interactive interpreter. """ logger = logging.getLogger(__name__) banner = 'Fiona %s Interactive Inspector (Python %s)\n' \ 'Type "src.schema", "next(src)", or "help(src)" ' \ 'for more information.' 
\ % (fiona.__version__, '.'.join(map(str, sys.version_info[:3]))) try: with fiona.open(src_path) as src: scope = locals() if not interpreter: code.interact(banner, local=scope) elif interpreter == 'ipython': import IPython IPython.InteractiveShell.banner1 = banner IPython.start_ipython(argv=[], user_ns=scope) else: raise click.ClickException( 'Interpreter {} is unsupported or missing ' 'dependencies'.format(interpreter)) except Exception: logger.exception("Exception caught during processing") raise click.Abort() Fiona-1.8.21/fiona/fio/load.py000066400000000000000000000067211420023252700160040ustar00rootroot00000000000000"""$ fio load""" from functools import partial import logging import click import cligj import fiona from fiona.fio import options, with_context_env from fiona.schema import FIELD_TYPES_MAP_REV from fiona.transform import transform_geom def _cb_key_val(ctx, param, value): """ click callback to validate `--opt KEY1=VAL1 --opt KEY2=VAL2` and collect in a dictionary like the one below, which is what the CLI function receives. If no value or `None` is received then an empty dictionary is returned. { 'KEY1': 'VAL1', 'KEY2': 'VAL2' } Note: `==VAL` breaks this as `str.split('=', 1)` is used. """ if not value: return {} else: out = {} for pair in value: if "=" not in pair: raise click.BadParameter( "Invalid syntax for KEY=VAL arg: {}".format(pair) ) else: k, v = pair.split("=", 1) k = k.lower() v = v.lower() out[k] = None if v.lower() in ["none", "null", "nil", "nada"] else v return out @click.command(short_help="Load GeoJSON to a dataset in another format.") @click.argument('output', required=True) @click.option('-f', '--format', '--driver', 'driver', required=True, help="Output format driver name.") @options.src_crs_opt @click.option('--dst-crs', '--dst_crs', help="Destination CRS. Defaults to --src-crs when not given.") @cligj.features_in_arg @click.option( "--layer", metavar="INDEX|NAME", callback=options.cb_layer, help="Load features into specified layer. Layers use " "zero-based numbering when accessed by index.", ) @click.option( "--co", "--profile", "creation_options", metavar="NAME=VALUE", multiple=True, callback=_cb_key_val, help="Driver specific creation options. See the documentation for the selected output driver for more information.", ) @click.pass_context @with_context_env def load(ctx, output, driver, src_crs, dst_crs, features, layer, creation_options): """Load features from JSON to a file in another format. The input is a GeoJSON feature collection or optionally a sequence of GeoJSON feature objects. """ logger = logging.getLogger(__name__) dst_crs = dst_crs or src_crs if src_crs and dst_crs and src_crs != dst_crs: transformer = partial(transform_geom, src_crs, dst_crs, antimeridian_cutting=True, precision=-1) else: def transformer(x): return x def feature_gen(): for feat in features: feat['geometry'] = transformer(feat['geometry']) yield feat try: source = feature_gen() # Use schema of first feature as a template. # TODO: schema specified on command line? 
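        # For example (hypothetical values), a first feature with a
        # Point geometry and properties {"name": "A", "area": 1.5}
        # yields the schema:
        #     {'geometry': 'Point',
        #      'properties': {'name': 'str', 'area': 'float'}}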
first = next(source) schema = {'geometry': first['geometry']['type']} schema['properties'] = dict([ (k, FIELD_TYPES_MAP_REV.get(type(v)) or 'str') for k, v in first['properties'].items()]) with fiona.open( output, "w", driver=driver, crs=dst_crs, schema=schema, layer=layer, **creation_options ) as dst: dst.write(first) dst.writerecords(source) except Exception: logger.exception("Exception caught during processing") raise click.Abort() Fiona-1.8.21/fiona/fio/ls.py000066400000000000000000000006201420023252700154730ustar00rootroot00000000000000"""$ fiona ls""" import json import click from cligj import indent_opt import fiona from fiona.fio import with_context_env @click.command() @click.argument('input', required=True) @indent_opt @click.pass_context @with_context_env def ls(ctx, input, indent): """ List layers in a datasource. """ result = fiona.listlayers(input) click.echo(json.dumps(result, indent=indent)) Fiona-1.8.21/fiona/fio/main.py000066400000000000000000000035451420023252700160120ustar00rootroot00000000000000""" Main click group for the CLI. Needs to be isolated for entry-point loading. """ import logging from pkg_resources import iter_entry_points import sys import click from click_plugins import with_plugins from cligj import verbose_opt, quiet_opt import fiona from fiona import __version__ as fio_version from fiona.session import AWSSession, DummySession def configure_logging(verbosity): log_level = max(10, 30 - 10 * verbosity) logging.basicConfig(stream=sys.stderr, level=log_level) @with_plugins(ep for ep in list(iter_entry_points('fiona.fio_commands')) + list(iter_entry_points('fiona.fio_plugins'))) @click.group() @verbose_opt @quiet_opt @click.option( "--aws-profile", help="Select a profile from the AWS credentials file") @click.option( "--aws-no-sign-requests", is_flag=True, help="Make requests anonymously") @click.option( "--aws-requester-pays", is_flag=True, help="Requester pays data transfer costs") @click.version_option(fio_version) @click.version_option(fiona.__gdal_version__, '--gdal-version', prog_name='GDAL') @click.version_option(sys.version, '--python-version', prog_name='Python') @click.pass_context def main_group( ctx, verbose, quiet, aws_profile, aws_no_sign_requests, aws_requester_pays): """Fiona command line interface. 
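    Verbosity is additive: each -v lowers and each -q raises the logging
    threshold by 10 from the WARNING (30) baseline, e.g. -v gives INFO
    and -vv gives DEBUG.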
""" verbosity = verbose - quiet configure_logging(verbosity) ctx.obj = {} ctx.obj["verbosity"] = verbosity ctx.obj["aws_profile"] = aws_profile envopts = {"CPL_DEBUG": (verbosity > 2)} if aws_profile or aws_no_sign_requests: session = AWSSession( profile_name=aws_profile, aws_unsigned=aws_no_sign_requests, requester_pays=aws_requester_pays, ) else: session = DummySession() ctx.obj["env"] = fiona.Env(session=session, **envopts) Fiona-1.8.21/fiona/fio/options.py000066400000000000000000000021671420023252700165600ustar00rootroot00000000000000"""Common commandline options for `fio`""" from collections import defaultdict import click src_crs_opt = click.option('--src-crs', '--src_crs', help="Source CRS.") dst_crs_opt = click.option('--dst-crs', '--dst_crs', help="Destination CRS.") def cb_layer(ctx, param, value): """Let --layer be a name or index.""" if value is None or not value.isdigit(): return value else: return int(value) def cb_multilayer(ctx, param, value): """ Transform layer options from strings ("1:a,1:b", "2:a,2:c,2:z") to { '1': ['a', 'b'], '2': ['a', 'c', 'z'] } """ out = defaultdict(list) for raw in value: for v in raw.split(','): ds, name = v.split(':') out[ds].append(name) return out def validate_multilayer_file_index(files, layerdict): """ Ensure file indexes provided in the --layer option are valid """ for key in layerdict.keys(): if key not in [str(k) for k in range(1, len(files) + 1)]: layer = key + ":" + layerdict[key][0] raise click.BadParameter("Layer {} does not exist".format(layer)) Fiona-1.8.21/fiona/fio/rm.py000066400000000000000000000014201420023252700154720ustar00rootroot00000000000000import click import logging import fiona from fiona.fio import with_context_env logger = logging.getLogger(__name__) @click.command(help="Remove a datasource or an individual layer.") @click.argument("input", required=True) @click.option("--layer", type=str, default=None, required=False, help="Name of layer to remove.") @click.option("--yes", is_flag=True) @click.pass_context @with_context_env def rm(ctx, input, layer, yes): if layer is None: kind = "datasource" else: kind = "layer" if not yes: click.confirm("The {} will be removed. Are you sure?".format(kind), abort=True) try: fiona.remove(input, layer=layer) except Exception: logger.exception("Failed to remove {}.".format(kind)) raise click.Abort() Fiona-1.8.21/fiona/gdal.pxi000066400000000000000000000560571420023252700153760ustar00rootroot00000000000000# GDAL API definitions. from libc.stdio cimport FILE cdef extern from "cpl_conv.h" nogil: void *CPLMalloc(size_t) void CPLFree(void* ptr) void CPLSetThreadLocalConfigOption(const char* key, const char* val) void CPLSetConfigOption(const char* key, const char* val) const char* CPLGetConfigOption(const char* key, const char* default) const char *CPLFindFile(const char *pszClass, const char *pszBasename) cdef extern from "cpl_error.h" nogil: ctypedef enum CPLErr: CE_None CE_Debug CE_Warning CE_Failure CE_Fatal # CPLErrorNum eludes me at the moment, I'm calling it 'int' # for now. 
ctypedef void (*CPLErrorHandler)(CPLErr, int, const char*) void CPLErrorReset() int CPLGetLastErrorNo() const char* CPLGetLastErrorMsg() CPLErr CPLGetLastErrorType() void CPLPushErrorHandler(CPLErrorHandler handler) void CPLPopErrorHandler() cdef extern from "cpl_string.h" nogil: int CSLCount(char **papszStrList) char **CSLAddString(char **strlist, const char *string) char **CSLAddNameValue(char **papszStrList, const char *pszName, const char *pszValue) char **CSLDuplicate(char **papszStrList) int CSLFindName(char **papszStrList, const char *pszName) int CSLFetchBoolean(char **papszStrList, const char *pszName, int default) const char *CSLFetchNameValue(char **papszStrList, const char *pszName) char **CSLSetNameValue(char **list, char *name, char *val) void CSLDestroy(char **list) char **CSLMerge(char **first, char **second) cdef extern from "cpl_vsi.h" nogil: ctypedef int vsi_l_offset ctypedef FILE VSILFILE unsigned char *VSIGetMemFileBuffer(const char *path, vsi_l_offset *data_len, int take_ownership) VSILFILE *VSIFileFromMemBuffer(const char *path, void *data, vsi_l_offset data_len, int take_ownership) VSILFILE* VSIFOpenL(const char *path, const char *mode) int VSIFCloseL(VSILFILE *fp) int VSIUnlink(const char *path) int VSIFFlushL(VSILFILE *fp) size_t VSIFReadL(void *buffer, size_t nSize, size_t nCount, VSILFILE *fp) int VSIFSeekL(VSILFILE *fp, vsi_l_offset nOffset, int nWhence) vsi_l_offset VSIFTellL(VSILFILE *fp) int VSIFTruncateL(VSILFILE *fp, vsi_l_offset nNewSize) size_t VSIFWriteL(void *buffer, size_t nSize, size_t nCount, VSILFILE *fp) cdef extern from "ogr_srs_api.h" nogil: ctypedef void * OGRCoordinateTransformationH ctypedef void * OGRSpatialReferenceH OGRCoordinateTransformationH OCTNewCoordinateTransformation( OGRSpatialReferenceH source, OGRSpatialReferenceH dest) void OCTDestroyCoordinateTransformation( OGRCoordinateTransformationH source) int OCTTransform(OGRCoordinateTransformationH ct, int nCount, double *x, double *y, double *z) int OSRAutoIdentifyEPSG(OGRSpatialReferenceH srs) void OSRCleanup() OGRSpatialReferenceH OSRClone(OGRSpatialReferenceH srs) int OSRExportToProj4(OGRSpatialReferenceH srs, char **params) int OSRExportToWkt(OGRSpatialReferenceH srs, char **params) const char *OSRGetAuthorityName(OGRSpatialReferenceH srs, const char *key) const char *OSRGetAuthorityCode(OGRSpatialReferenceH srs, const char *key) int OSRImportFromEPSG(OGRSpatialReferenceH srs, int code) int OSRImportFromProj4(OGRSpatialReferenceH srs, const char *proj) int OSRIsGeographic(OGRSpatialReferenceH srs) int OSRIsProjected(OGRSpatialReferenceH srs) int OSRIsSame(OGRSpatialReferenceH srs1, OGRSpatialReferenceH srs2) OGRSpatialReferenceH OSRNewSpatialReference(const char *wkt) void OSRRelease(OGRSpatialReferenceH srs) int OSRSetFromUserInput(OGRSpatialReferenceH srs, const char *input) cdef extern from "gdal.h" nogil: ctypedef void * GDALMajorObjectH ctypedef void * GDALDatasetH ctypedef void * GDALRasterBandH ctypedef void * GDALDriverH ctypedef void * GDALColorTableH ctypedef void * GDALRasterAttributeTableH ctypedef void * GDALAsyncReaderH ctypedef long long GSpacing ctypedef unsigned long long GIntBig ctypedef enum GDALDataType: GDT_Unknown GDT_Byte GDT_UInt16 GDT_Int16 GDT_UInt32 GDT_Int32 GDT_Float32 GDT_Float64 GDT_CInt16 GDT_CInt32 GDT_CFloat32 GDT_CFloat64 GDT_TypeCount ctypedef enum GDALAccess: GA_ReadOnly GA_Update ctypedef enum GDALRWFlag: GF_Read GF_Write ctypedef enum GDALRIOResampleAlg: GRIORA_NearestNeighbour GRIORA_Bilinear GRIORA_Cubic, GRIORA_CubicSpline GRIORA_Lanczos 
GRIORA_Average GRIORA_Mode GRIORA_Gauss ctypedef enum GDALColorInterp: GCI_Undefined GCI_GrayIndex GCI_PaletteIndex GCI_RedBand GCI_GreenBand GCI_BlueBand GCI_AlphaBand GCI_HueBand GCI_SaturationBand GCI_LightnessBand GCI_CyanBand GCI_YCbCr_YBand GCI_YCbCr_CbBand GCI_YCbCr_CrBand GCI_Max ctypedef struct GDALColorEntry: short c1 short c2 short c3 short c4 ctypedef struct GDAL_GCP: char *pszId char *pszInfo double dfGCPPixel double dfGCPLine double dfGCPX double dfGCPY double dfGCPZ void GDALAllRegister() void GDALDestroyDriverManager() int GDALGetDriverCount() GDALDriverH GDALGetDriver(int i) const char *GDALGetDriverShortName(GDALDriverH driver) const char *GDALGetDriverLongName(GDALDriverH driver) const char* GDALGetDescription(GDALMajorObjectH obj) void GDALSetDescription(GDALMajorObjectH obj, const char *text) GDALDriverH GDALGetDriverByName(const char *name) GDALDatasetH GDALOpen(const char *filename, GDALAccess access) # except -1 GDALDatasetH GDALOpenShared(const char *filename, GDALAccess access) # except -1 void GDALFlushCache(GDALDatasetH hds) void GDALClose(GDALDatasetH hds) GDALDriverH GDALGetDatasetDriver(GDALDatasetH hds) int GDALGetGeoTransform(GDALDatasetH hds, double *transform) const char *GDALGetProjectionRef(GDALDatasetH hds) int GDALGetRasterXSize(GDALDatasetH hds) int GDALGetRasterYSize(GDALDatasetH hds) int GDALGetRasterCount(GDALDatasetH hds) GDALRasterBandH GDALGetRasterBand(GDALDatasetH hds, int num) GDALRasterBandH GDALGetOverview(GDALRasterBandH hband, int num) int GDALGetRasterBandXSize(GDALRasterBandH hband) int GDALGetRasterBandYSize(GDALRasterBandH hband) const char *GDALGetRasterUnitType(GDALRasterBandH hband) CPLErr GDALSetRasterUnitType(GDALRasterBandH hband, const char *val) int GDALSetGeoTransform(GDALDatasetH hds, double *transform) int GDALSetProjection(GDALDatasetH hds, const char *wkt) void GDALGetBlockSize(GDALRasterBandH , int *xsize, int *ysize) int GDALGetRasterDataType(GDALRasterBandH band) double GDALGetRasterNoDataValue(GDALRasterBandH band, int *success) int GDALSetRasterNoDataValue(GDALRasterBandH band, double value) int GDALDatasetRasterIO(GDALRasterBandH band, int, int xoff, int yoff, int xsize, int ysize, void *buffer, int width, int height, int, int count, int *bmap, int poff, int loff, int boff) int GDALRasterIO(GDALRasterBandH band, int, int xoff, int yoff, int xsize, int ysize, void *buffer, int width, int height, int, int poff, int loff) int GDALFillRaster(GDALRasterBandH band, double rvalue, double ivalue) GDALDatasetH GDALCreate(GDALDriverH driver, const char *path, int width, int height, int nbands, GDALDataType dtype, const char **options) GDALDatasetH GDALCreateCopy(GDALDriverH driver, const char *path, GDALDatasetH hds, int strict, char **options, void *progress_func, void *progress_data) char** GDALGetMetadata(GDALMajorObjectH obj, const char *pszDomain) int GDALSetMetadata(GDALMajorObjectH obj, char **papszMD, const char *pszDomain) const char* GDALGetMetadataItem(GDALMajorObjectH obj, const char *pszName, const char *pszDomain) int GDALSetMetadataItem(GDALMajorObjectH obj, const char *pszName, const char *pszValue, const char *pszDomain) const GDALColorEntry *GDALGetColorEntry(GDALColorTableH table, int) void GDALSetColorEntry(GDALColorTableH table, int i, const GDALColorEntry *poEntry) int GDALSetRasterColorTable(GDALRasterBandH band, GDALColorTableH table) GDALColorTableH GDALGetRasterColorTable(GDALRasterBandH band) GDALColorTableH GDALCreateColorTable(int) void GDALDestroyColorTable(GDALColorTableH table) int 
GDALGetColorEntryCount(GDALColorTableH table) int GDALGetRasterColorInterpretation(GDALRasterBandH band) int GDALSetRasterColorInterpretation(GDALRasterBandH band, GDALColorInterp) int GDALGetMaskFlags(GDALRasterBandH band) int GDALCreateDatasetMaskBand(GDALDatasetH hds, int flags) void *GDALGetMaskBand(GDALRasterBandH band) int GDALCreateMaskBand(GDALDatasetH hds, int flags) int GDALGetOverviewCount(GDALRasterBandH band) int GDALBuildOverviews(GDALDatasetH hds, const char *resampling, int nOverviews, int *overviews, int nBands, int *bands, void *progress_func, void *progress_data) int GDALCheckVersion(int nVersionMajor, int nVersionMinor, const char *pszCallingComponentName) const char* GDALVersionInfo(const char *pszRequest) CPLErr GDALSetGCPs(GDALDatasetH hDS, int nGCPCount, const GDAL_GCP *pasGCPList, const char *pszGCPProjection) const GDAL_GCP *GDALGetGCPs(GDALDatasetH hDS) int GDALGetGCPCount(GDALDatasetH hDS) const char *GDALGetGCPProjection(GDALDatasetH hDS) int GDALGetCacheMax() void GDALSetCacheMax(int nBytes) GIntBig GDALGetCacheMax64() void GDALSetCacheMax64(GIntBig nBytes) CPLErr GDALDeleteDataset(GDALDriverH, const char *) char** GDALGetFileList(GDALDatasetH hDS) CPLErr GDALCopyDatasetFiles (GDALDriverH hDriver, const char * pszNewName, const char * pszOldName) cdef extern from "ogr_api.h" nogil: ctypedef void * OGRLayerH ctypedef void * OGRDataSourceH ctypedef void * OGRSFDriverH ctypedef void * OGRFieldDefnH ctypedef void * OGRFeatureDefnH ctypedef void * OGRFeatureH ctypedef void * OGRGeometryH ctypedef int OGRErr ctypedef struct OGREnvelope: double MinX double MaxX double MinY double MaxY void OGRRegisterAll() void OGRCleanupAll() int OGRGetDriverCount() char *OGR_Dr_GetName(OGRSFDriverH driver) OGRDataSourceH OGR_Dr_CreateDataSource(OGRSFDriverH driver, const char *path, char **options) int OGR_Dr_DeleteDataSource(OGRSFDriverH driver, const char *path) int OGR_DS_DeleteLayer(OGRDataSourceH datasource, int n) OGRLayerH OGR_DS_CreateLayer(OGRDataSourceH datasource, const char *name, OGRSpatialReferenceH crs, int geomType, char **options) OGRLayerH OGR_DS_ExecuteSQL(OGRDataSourceH, const char *name, OGRGeometryH filter, const char *dialext) void OGR_DS_Destroy(OGRDataSourceH datasource) OGRSFDriverH OGR_DS_GetDriver(OGRLayerH layer_defn) OGRLayerH OGR_DS_GetLayerByName(OGRDataSourceH datasource, const char *name) int OGR_DS_GetLayerCount(OGRDataSourceH datasource) OGRLayerH OGR_DS_GetLayer(OGRDataSourceH datasource, int n) void OGR_DS_ReleaseResultSet(OGRDataSourceH datasource, OGRLayerH results) int OGR_DS_SyncToDisk(OGRDataSourceH datasource) OGRFeatureH OGR_F_Create(OGRFeatureDefnH featuredefn) void OGR_F_Destroy(OGRFeatureH feature) long OGR_F_GetFID(OGRFeatureH feature) int OGR_F_IsFieldSet(OGRFeatureH feature, int n) int OGR_F_GetFieldAsDateTime(OGRFeatureH feature, int n, int *y, int *m, int *d, int *h, int *m, int *s, int *z) double OGR_F_GetFieldAsDouble(OGRFeatureH feature, int n) int OGR_F_GetFieldAsInteger(OGRFeatureH feature, int n) const char *OGR_F_GetFieldAsString(OGRFeatureH feature, int n) int OGR_F_GetFieldCount(OGRFeatureH feature) OGRFieldDefnH OGR_F_GetFieldDefnRef(OGRFeatureH feature, int n) int OGR_F_GetFieldIndex(OGRFeatureH feature, const char *name) OGRGeometryH OGR_F_GetGeometryRef(OGRFeatureH feature) void OGR_F_SetFieldDateTime(OGRFeatureH feature, int n, int y, int m, int d, int hh, int mm, int ss, int tz) void OGR_F_SetFieldDouble(OGRFeatureH feature, int n, double value) void OGR_F_SetFieldInteger(OGRFeatureH feature, int n, int value) void 
OGR_F_SetFieldString(OGRFeatureH feature, int n, const char *value) int OGR_F_SetGeometryDirectly(OGRFeatureH feature, OGRGeometryH geometry) OGRFeatureDefnH OGR_FD_Create(const char *name) int OGR_FD_GetFieldCount(OGRFeatureDefnH featuredefn) OGRFieldDefnH OGR_FD_GetFieldDefn(OGRFeatureDefnH featuredefn, int n) int OGR_FD_GetGeomType(OGRFeatureDefnH featuredefn) const char *OGR_FD_GetName(OGRFeatureDefnH featuredefn) OGRFieldDefnH OGR_Fld_Create(const char *name, int fieldtype) void OGR_Fld_Destroy(OGRFieldDefnH) char *OGR_Fld_GetNameRef(OGRFieldDefnH) int OGR_Fld_GetPrecision(OGRFieldDefnH) int OGR_Fld_GetType(OGRFieldDefnH) int OGR_Fld_GetWidth(OGRFieldDefnH) void OGR_Fld_Set(OGRFieldDefnH, const char *name, int fieldtype, int width, int precision, int justification) void OGR_Fld_SetPrecision(OGRFieldDefnH, int n) void OGR_Fld_SetWidth(OGRFieldDefnH, int n) OGRErr OGR_G_AddGeometryDirectly(OGRGeometryH geometry, OGRGeometryH part) void OGR_G_AddPoint(OGRGeometryH geometry, double x, double y, double z) void OGR_G_AddPoint_2D(OGRGeometryH geometry, double x, double y) void OGR_G_CloseRings(OGRGeometryH geometry) OGRGeometryH OGR_G_CreateGeometry(int wkbtypecode) OGRGeometryH OGR_G_CreateGeometryFromJson(const char *json) void OGR_G_DestroyGeometry(OGRGeometryH geometry) char *OGR_G_ExportToJson(OGRGeometryH geometry) OGRErr OGR_G_ExportToWkb(OGRGeometryH geometry, int endianness, char *buffer) int OGR_G_GetCoordinateDimension(OGRGeometryH geometry) int OGR_G_GetGeometryCount(OGRGeometryH geometry) const char *OGR_G_GetGeometryName(OGRGeometryH geometry) int OGR_G_GetGeometryType(OGRGeometryH geometry) OGRGeometryH OGR_G_GetGeometryRef(OGRGeometryH geometry, int n) int OGR_G_GetPointCount(OGRGeometryH geometry) double OGR_G_GetX(OGRGeometryH geometry, int n) double OGR_G_GetY(OGRGeometryH geometry, int n) double OGR_G_GetZ(OGRGeometryH geometry, int n) void OGR_G_ImportFromWkb(OGRGeometryH geometry, unsigned char *bytes, int nbytes) int OGR_G_WkbSize(OGRGeometryH geometry) OGRErr OGR_L_CreateFeature(OGRLayerH layer, OGRFeatureH feature) int OGR_L_CreateField(OGRLayerH layer, OGRFieldDefnH, int flexible) OGRErr OGR_L_GetExtent(OGRLayerH layer, void *extent, int force) OGRFeatureH OGR_L_GetFeature(OGRLayerH layer, int n) int OGR_L_GetFeatureCount(OGRLayerH layer, int m) OGRFeatureDefnH OGR_L_GetLayerDefn(OGRLayerH layer) const char *OGR_L_GetName(OGRLayerH layer) OGRFeatureH OGR_L_GetNextFeature(OGRLayerH layer) OGRGeometryH OGR_L_GetSpatialFilter(OGRLayerH layer) OGRSpatialReferenceH OGR_L_GetSpatialRef(OGRLayerH layer) void OGR_L_ResetReading(OGRLayerH layer) void OGR_L_SetSpatialFilter(OGRLayerH layer, OGRGeometryH geometry) void OGR_L_SetSpatialFilterRect(OGRLayerH layer, double minx, double miny, double maxx, double maxy) int OGR_L_TestCapability(OGRLayerH layer, const char *name) OGRSFDriverH OGRGetDriverByName(const char *) OGRSFDriverH OGRGetDriver(int i) OGRDataSourceH OGROpen(const char *path, int mode, void *x) OGRDataSourceH OGROpenShared(const char *path, int mode, void *x) int OGRReleaseDataSource(OGRDataSourceH datasource) cdef extern from "gdalwarper.h" nogil: ctypedef enum GDALResampleAlg: GRA_NearestNeighbour GRA_Bilinear GRA_Cubic GRA_CubicSpline GRA_Lanczos GRA_Average GRA_Mode ctypedef int (*GDALMaskFunc)( void *pMaskFuncArg, int nBandCount, int eType, int nXOff, int nYOff, int nXSize, int nYSize, unsigned char **papabyImageData, int bMaskIsFloat, void *pMask) ctypedef int (*GDALTransformerFunc)( void *pTransformerArg, int bDstToSrc, int nPointCount, double *x, double 
*y, double *z, int *panSuccess) ctypedef struct GDALWarpOptions: char **papszWarpOptions double dfWarpMemoryLimit GDALResampleAlg eResampleAlg GDALDataType eWorkingDataType GDALDatasetH hSrcDS GDALDatasetH hDstDS # 0 for all bands int nBandCount # List of source band indexes int *panSrcBands # List of destination band indexes int *panDstBands # The source band so use as an alpha (transparency) value, 0=disabled int nSrcAlphaBand # The dest. band so use as an alpha (transparency) value, 0=disabled int nDstAlphaBand # The "nodata" value real component for each input band, if NULL there isn't one */ double *padfSrcNoDataReal # The "nodata" value imaginary component - may be NULL even if real component is provided. */ double *padfSrcNoDataImag # The "nodata" value real component for each output band, if NULL there isn't one */ double *padfDstNoDataReal # The "nodata" value imaginary component - may be NULL even if real component is provided. */ double *padfDstNoDataImag # GDALProgressFunc() compatible progress reporting function, or NULL if there isn't one. */ void *pfnProgress # Callback argument to be passed to pfnProgress. */ void *pProgressArg # Type of spatial point transformer function */ GDALTransformerFunc pfnTransformer # Handle to image transformer setup structure */ void *pTransformerArg GDALMaskFunc *papfnSrcPerBandValidityMaskFunc void **papSrcPerBandValidityMaskFuncArg GDALMaskFunc pfnSrcValidityMaskFunc void *pSrcValidityMaskFuncArg GDALMaskFunc pfnSrcDensityMaskFunc void *pSrcDensityMaskFuncArg GDALMaskFunc pfnDstDensityMaskFunc void *pDstDensityMaskFuncArg GDALMaskFunc pfnDstValidityMaskFunc void *pDstValidityMaskFuncArg int (*pfnPreWarpChunkProcessor)(void *pKern, void *pArg) void *pPreWarpProcessorArg int (*pfnPostWarpChunkProcessor)(void *pKern, void *pArg) void *pPostWarpProcessorArg # Optional OGRPolygonH for a masking cutline. 
*/ OGRGeometryH hCutline # Optional blending distance to apply across cutline in pixels, default is 0 double dfCutlineBlendDist GDALWarpOptions *GDALCreateWarpOptions() void GDALDestroyWarpOptions(GDALWarpOptions *options) GDALDatasetH GDALAutoCreateWarpedVRT( GDALDatasetH hSrcDS, const char *pszSrcWKT, const char *pszDstWKT, GDALResampleAlg eResampleAlg, double dfMaxError, const GDALWarpOptions *psOptionsIn) GDALDatasetH GDALCreateWarpedVRT( GDALDatasetH hSrcDS, int nPixels, int nLines, double *padfGeoTransform, const GDALWarpOptions *psOptionsIn) cdef extern from "gdal_alg.h" nogil: int GDALPolygonize(GDALRasterBandH band, GDALRasterBandH mask_band, OGRLayerH layer, int fidx, char **options, void *progress_func, void *progress_data) int GDALFPolygonize(GDALRasterBandH band, GDALRasterBandH mask_band, OGRLayerH layer, int fidx, char **options, void *progress_func, void *progress_data) int GDALSieveFilter(GDALRasterBandH src_band, GDALRasterBandH mask_band, GDALRasterBandH dst_band, int size, int connectivity, char **options, void *progress_func, void *progress_data) int GDALRasterizeGeometries(GDALDatasetH hds, int band_count, int *dst_bands, int geom_count, OGRGeometryH *geometries, GDALTransformerFunc transform_func, void *transform, double *pixel_values, char **options, void *progress_func, void *progress_data) void *GDALCreateGenImgProjTransformer(GDALDatasetH src_hds, const char *pszSrcWKT, GDALDatasetH dst_hds, const char *pszDstWKT, int bGCPUseOK, double dfGCPErrorThreshold, int nOrder) void *GDALCreateGenImgProjTransformer2(GDALDatasetH src_hds, GDALDatasetH dst_hds, char **options) void *GDALCreateGenImgProjTransformer3( const char *pszSrcWKT, const double *padfSrcGeoTransform, const char *pszDstWKT, const double *padfDstGeoTransform) void GDALSetGenImgProjTransformerDstGeoTransform(void *hTransformArg, double *padfGeoTransform) int GDALGenImgProjTransform(void *pTransformArg, int bDstToSrc, int nPointCount, double *x, double *y, double *z, int *panSuccess) void GDALDestroyGenImgProjTransformer(void *) void *GDALCreateApproxTransformer(GDALTransformerFunc pfnRawTransformer, void *pRawTransformerArg, double dfMaxError) int GDALApproxTransform(void *pTransformArg, int bDstToSrc, int npoints, double *x, double *y, double *z, int *panSuccess) void GDALDestroyApproxTransformer(void *) void GDALApproxTransformerOwnsSubtransformer(void *, int) int GDALFillNodata(GDALRasterBandH dst_band, GDALRasterBandH mask_band, double max_search_distance, int deprecated, int smoothing_iterations, char **options, void *progress_func, void *progress_data) int GDALChecksumImage(GDALRasterBandH band, int xoff, int yoff, int width, int height) int GDALSuggestedWarpOutput2( GDALDatasetH hSrcDS, GDALTransformerFunc pfnRawTransformer, void * pTransformArg, double * padfGeoTransformOut, int * pnPixels, int * pnLines, double * padfExtent, int nOptions) cdef extern from "ogr_core.h" nogil: char *OGRGeometryTypeToName(int type) Fiona-1.8.21/fiona/inspector.py000066400000000000000000000017141420023252700163130ustar00rootroot00000000000000 import code import logging import sys import fiona logging.basicConfig(stream=sys.stderr, level=logging.INFO) logger = logging.getLogger('fiona.inspector') def main(srcfile): with fiona.drivers(): with fiona.open(srcfile) as src: code.interact( 'Fiona %s Interactive Inspector (Python %s)\n' 'Type "src.schema", "next(src)", or "help(src)" ' 'for more information.' 
% ( fiona.__version__, '.'.join(map(str, sys.version_info[:3]))), local=locals()) return 1 if __name__ == '__main__': import argparse parser = argparse.ArgumentParser( prog="python -m fiona.inspector", description="Open a data file and drop into an interactive interpreter") parser.add_argument( 'src', metavar='FILE', help="Input dataset file name") args = parser.parse_args() main(args.src) Fiona-1.8.21/fiona/io.py000066400000000000000000000073111420023252700147130ustar00rootroot00000000000000"""Classes capable of reading and writing collections """ from collections import OrderedDict import logging import fiona._loading with fiona._loading.add_gdal_dll_directories(): from fiona.ogrext import MemoryFileBase from fiona.collection import Collection log = logging.getLogger(__name__) class MemoryFile(MemoryFileBase): """A BytesIO-like object, backed by an in-memory file. This allows formatted files to be read and written without I/O. A MemoryFile created with initial bytes becomes immutable. A MemoryFile created without initial bytes may be written to using either file-like or dataset interfaces. Examples -------- """ def __init__(self, file_or_bytes=None, filename=None, ext=""): if ext and not ext.startswith("."): ext = "." + ext super(MemoryFile, self).__init__( file_or_bytes=file_or_bytes, filename=filename, ext=ext) def open(self, driver=None, schema=None, crs=None, encoding=None, layer=None, vfs=None, enabled_drivers=None, crs_wkt=None, **kwargs): """Open the file and return a Fiona collection object. If data has already been written, the file is opened in 'r' mode. Otherwise, the file is opened in 'w' mode. Parameters ---------- Note well that there is no `path` parameter: a `MemoryFile` contains a single dataset and there is no need to specify a path. Other parameters are optional and have the same semantics as the parameters of `fiona.open()`. """ if self.closed: raise IOError("I/O operation on closed file.") if not self.exists(): self._ensure_extension(driver) this_schema = schema.copy() this_schema["properties"] = OrderedDict(schema["properties"]) return Collection( self.name, "w", crs=crs, driver=driver, schema=this_schema, encoding=encoding, layer=layer, enabled_drivers=enabled_drivers, crs_wkt=crs_wkt, **kwargs ) elif self.mode in ("r", "r+"): return Collection( self.name, "r", driver=driver, encoding=encoding, layer=layer, enabled_drivers=enabled_drivers, **kwargs ) def __enter__(self): return self def __exit__(self, *args, **kwargs): self.close() class ZipMemoryFile(MemoryFile): """A read-only BytesIO-like object backed by an in-memory zip file. This allows a zip file containing formatted files to be read without I/O. """ def __init__(self, file_or_bytes=None): super(ZipMemoryFile, self).__init__(file_or_bytes, ext=".zip") def open(self, path=None, driver=None, encoding=None, layer=None, enabled_drivers=None, **kwargs): """Open a dataset within the zipped stream. Parameters ---------- path : str Path to a dataset in the zip file, relative to the root of the archive. 
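        Other parameters are optional and have the same semantics as the
        parameters of `fiona.open()`.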
Returns ------- A Fiona collection object """ if self.closed: raise IOError("I/O operation on closed file.") if path: vsi_path = '/vsizip{0}/{1}'.format(self.name, path.lstrip('/')) else: vsi_path = '/vsizip{0}'.format(self.name) return Collection(vsi_path, 'r', driver=driver, encoding=encoding, layer=layer, enabled_drivers=enabled_drivers, **kwargs) Fiona-1.8.21/fiona/logutils.py000066400000000000000000000016071420023252700161500ustar00rootroot00000000000000"""Logging helper classes.""" import logging class FieldSkipLogFilter(logging.Filter): """Filter field skip log messges. At most, one message per field skipped per loop will be passed. """ def __init__(self, name=""): super(FieldSkipLogFilter, self).__init__(name) self.seen_msgs = set() def filter(self, record): """Pass record if not seen.""" msg = record.getMessage() if msg.startswith("Skipping field"): retval = msg not in self.seen_msgs self.seen_msgs.add(msg) return retval else: return 1 class LogFiltering(object): def __init__(self, logger, filter): self.logger = logger self.filter = filter def __enter__(self): self.logger.addFilter(self.filter) def __exit__(self, *args, **kwargs): self.logger.removeFilter(self.filter) Fiona-1.8.21/fiona/ogrext.pyx000066400000000000000000002105441420023252700160100ustar00rootroot00000000000000# These are extension functions and classes using the OGR C API. from __future__ import absolute_import import datetime import json import locale import logging import os import warnings import math from uuid import uuid4 from collections import namedtuple from six import integer_types, string_types, text_type from fiona._shim cimport * from fiona._geometry cimport ( GeomBuilder, OGRGeomBuilder, geometry_type_code, normalize_geometry_type_code, base_geometry_type_code) from fiona._err cimport exc_wrap_int, exc_wrap_pointer, exc_wrap_vsilfile, get_last_error_msg import fiona from fiona._env import get_gdal_version_num, calc_gdal_version_num, get_gdal_version_tuple from fiona._err import cpl_errs, FionaNullPointerError, CPLE_BaseError, CPLE_OpenFailedError from fiona._geometry import GEOMETRY_TYPES from fiona import compat from fiona.env import Env from fiona.errors import ( DriverError, DriverIOError, SchemaError, CRSError, FionaValueError, TransactionError, GeometryTypeValidationError, DatasetDeleteError, FeatureWarning, FionaDeprecationWarning) from fiona.compat import OrderedDict from fiona.rfc3339 import parse_date, parse_datetime, parse_time from fiona.rfc3339 import FionaDateType, FionaDateTimeType, FionaTimeType from fiona.schema import FIELD_TYPES, FIELD_TYPES_MAP, normalize_field_type from fiona.path import vsi_path from fiona._shim cimport is_field_null, osr_get_name, osr_set_traditional_axis_mapping_strategy from libc.stdlib cimport malloc, free from libc.string cimport strcmp from cpython cimport PyBytes_FromStringAndSize, PyBytes_AsString from fiona.drvsupport import _driver_supports_timezones cdef extern from "ogr_api.h" nogil: ctypedef void * OGRLayerH ctypedef void * OGRDataSourceH ctypedef void * OGRSFDriverH ctypedef void * OGRFieldDefnH ctypedef void * OGRFeatureDefnH ctypedef void * OGRFeatureH ctypedef void * OGRGeometryH log = logging.getLogger(__name__) DEFAULT_TRANSACTION_SIZE = 20000 # OGR Driver capability cdef const char * ODrCCreateDataSource = "CreateDataSource" cdef const char * ODrCDeleteDataSource = "DeleteDataSource" # OGR Layer capability cdef const char * OLC_RANDOMREAD = "RandomRead" cdef const char * OLC_SEQUENTIALWRITE = "SequentialWrite" cdef const char * OLC_RANDOMWRITE = 
"RandomWrite" cdef const char * OLC_FASTSPATIALFILTER = "FastSpatialFilter" cdef const char * OLC_FASTFEATURECOUNT = "FastFeatureCount" cdef const char * OLC_FASTGETEXTENT = "FastGetExtent" cdef const char * OLC_FASTSETNEXTBYINDEX = "FastSetNextByIndex" cdef const char * OLC_CREATEFIELD = "CreateField" cdef const char * OLC_CREATEGEOMFIELD = "CreateGeomField" cdef const char * OLC_DELETEFIELD = "DeleteField" cdef const char * OLC_REORDERFIELDS = "ReorderFields" cdef const char * OLC_ALTERFIELDDEFN = "AlterFieldDefn" cdef const char * OLC_DELETEFEATURE = "DeleteFeature" cdef const char * OLC_STRINGSASUTF8 = "StringsAsUTF8" cdef const char * OLC_TRANSACTIONS = "Transactions" cdef const char * OLC_IGNOREFIELDS = "IgnoreFields" # OGR integer error types. OGRERR_NONE = 0 OGRERR_NOT_ENOUGH_DATA = 1 # not enough data to deserialize */ OGRERR_NOT_ENOUGH_MEMORY = 2 OGRERR_UNSUPPORTED_GEOMETRY_TYPE = 3 OGRERR_UNSUPPORTED_OPERATION = 4 OGRERR_CORRUPT_DATA = 5 OGRERR_FAILURE = 6 OGRERR_UNSUPPORTED_SRS = 7 OGRERR_INVALID_HANDLE = 8 def _explode(coords): """Explode a GeoJSON geometry's coordinates object and yield coordinate tuples. As long as the input is conforming, the type of the geometry doesn't matter.""" for e in coords: if isinstance(e, (float, int)): yield coords break else: for f in _explode(e): yield f def _bounds(geometry): """Bounding box of a GeoJSON geometry""" try: xyz = tuple(zip(*list(_explode(geometry['coordinates'])))) return min(xyz[0]), min(xyz[1]), max(xyz[0]), max(xyz[1]) except (KeyError, TypeError): return None cdef int GDAL_VERSION_NUM = get_gdal_version_num() class TZ(datetime.tzinfo): def __init__(self, minutes): self.minutes = minutes def utcoffset(self, dt): return datetime.timedelta(minutes=self.minutes) # Feature extension classes and functions follow. cdef class FeatureBuilder: """Build Fiona features from OGR feature pointers. No OGR objects are allocated by this function and the feature argument is not destroyed. """ cdef build(self, void *feature, encoding='utf-8', bbox=False, driver=None, ignore_fields=None, ignore_geometry=False): """Build a Fiona feature object from an OGR feature Parameters ---------- feature : void * The OGR feature # TODO: use a real typedef encoding : str The encoding of OGR feature attributes bbox : bool Not used driver : str OGR format driver name like 'GeoJSON' ignore_fields : sequence A sequence of field names that will be ignored and omitted in the Fiona feature properties ignore_geometry : bool Flag for whether the OGR geometry field is to be ignored Returns ------- dict """ cdef void *fdefn = NULL cdef int i cdef unsigned char *data = NULL cdef int l cdef int retval cdef int fieldsubtype cdef const char *key_c = NULL # Parameters for get_field_as_datetime cdef int y = 0 cdef int m = 0 cdef int d = 0 cdef int hh = 0 cdef int mm = 0 cdef float fss = 0.0 cdef int tz = 0 # Skeleton of the feature to be returned. fid = OGR_F_GetFID(feature) props = OrderedDict() fiona_feature = { "type": "Feature", "id": str(fid), "properties": props, } ignore_fields = set(ignore_fields or []) # Iterate over the fields of the OGR feature. 
for i in range(OGR_F_GetFieldCount(feature)): fdefn = OGR_F_GetFieldDefnRef(feature, i) if fdefn == NULL: raise ValueError("NULL field definition at index {}".format(i)) key_c = OGR_Fld_GetNameRef(fdefn) if key_c == NULL: raise ValueError("NULL field name reference at index {}".format(i)) key_b = key_c key = key_b.decode(encoding) if not key: warnings.warn("Empty field name at index {}".format(i)) if key in ignore_fields: continue fieldtypename = FIELD_TYPES[OGR_Fld_GetType(fdefn)] fieldsubtype = get_field_subtype(fdefn) if not fieldtypename: log.warning( "Skipping field %s: invalid type %s", key, OGR_Fld_GetType(fdefn)) continue # TODO: other types fieldtype = FIELD_TYPES_MAP[fieldtypename] if is_field_null(feature, i): props[key] = None elif fieldtypename == 'int32': if fieldsubtype == OFSTBoolean: props[key] = bool(OGR_F_GetFieldAsInteger(feature, i)) else: props[key] = OGR_F_GetFieldAsInteger(feature, i) elif fieldtype is int: if fieldsubtype == OFSTBoolean: props[key] = bool(OGR_F_GetFieldAsInteger64(feature, i)) else: props[key] = OGR_F_GetFieldAsInteger64(feature, i) elif fieldtype is float: props[key] = OGR_F_GetFieldAsDouble(feature, i) elif fieldtype is text_type: try: val = OGR_F_GetFieldAsString(feature, i) val = val.decode(encoding) except UnicodeDecodeError: log.warning( "Failed to decode %s using %s codec", val, encoding) # Does the text contain a JSON object? Check as cheaply as we can. if driver == 'GeoJSON' and val.startswith('{'): try: val = json.loads(val) except ValueError as err: log.warning(str(err)) # Now add to the properties object. props[key] = val elif fieldtype in (FionaDateType, FionaTimeType, FionaDateTimeType): retval = get_field_as_datetime(feature, i, &y, &m, &d, &hh, &mm, &fss, &tz) ms, ss = math.modf(fss) ss = int(ss) ms = int(round(ms * 10**6)) # OGR_F_GetFieldAsDateTimeEx: (0=unknown, 1=localtime, 100=GMT, see data model for details) # CPLParseRFC822DateTime: (0=unknown, 100=GMT, 101=GMT+15minute, 99=GMT-15minute), or NULL tzinfo = None if tz > 1: tz_minutes = (tz - 100) * 15 tzinfo = TZ(tz_minutes) try: if fieldtype is FionaDateType: props[key] = datetime.date(y, m, d).isoformat() elif fieldtype is FionaTimeType: props[key] = datetime.time(hh, mm, ss, ms, tzinfo).isoformat() else: props[key] = datetime.datetime(y, m, d, hh, mm, ss, ms, tzinfo).isoformat() except ValueError as err: log.exception(err) props[key] = None elif fieldtype is bytes: data = OGR_F_GetFieldAsBinary(feature, i, &l) props[key] = data[:l] else: log.debug("%s: None, fieldtype: %r, %r" % (key, fieldtype, fieldtype in string_types)) props[key] = None cdef void *cogr_geometry = NULL cdef void *org_geometry = NULL if not ignore_geometry: cogr_geometry = OGR_F_GetGeometryRef(feature) if cogr_geometry is not NULL: code = base_geometry_type_code(OGR_G_GetGeometryType(cogr_geometry)) if 8 <= code <= 14: # Curves. cogr_geometry = get_linear_geometry(cogr_geometry) geom = GeomBuilder().build(cogr_geometry) OGR_G_DestroyGeometry(cogr_geometry) elif 15 <= code <= 17: # We steal the geometry: the geometry of the in-memory feature is now null # and we are responsible for cogr_geometry. 
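# Note: base codes 15-17 are PolyhedralSurface, TIN and Triangle. A hedged
# sketch of the effect of the forcing below (values illustrative): reading
# a TIN or PolyhedralSurface layer yields
# feat["geometry"]["type"] == "MultiPolygon", while a Triangle yields
# "Polygon".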
org_geometry = OGR_F_StealGeometry(feature) if code in (15, 16): cogr_geometry = OGR_G_ForceToMultiPolygon(org_geometry) elif code == 17: cogr_geometry = OGR_G_ForceToPolygon(org_geometry) geom = GeomBuilder().build(cogr_geometry) OGR_G_DestroyGeometry(cogr_geometry) else: geom = GeomBuilder().build(cogr_geometry) fiona_feature["geometry"] = geom else: fiona_feature["geometry"] = None return fiona_feature cdef class OGRFeatureBuilder: """Builds an OGR Feature from a Fiona feature mapping. Allocates one OGR Feature which should be destroyed by the caller. Borrows a layer definition from the collection. """ cdef void * build(self, feature, collection) except NULL: cdef void *cogr_geometry = NULL cdef const char *string_c = NULL cdef WritingSession session session = collection.session cdef void *cogr_layer = session.cogr_layer if cogr_layer == NULL: raise ValueError("Null layer") cdef void *cogr_featuredefn = OGR_L_GetLayerDefn(cogr_layer) if cogr_featuredefn == NULL: raise ValueError("Null feature definition") cdef void *cogr_feature = OGR_F_Create(cogr_featuredefn) if cogr_feature == NULL: raise ValueError("Null feature") if feature['geometry'] is not None: cogr_geometry = OGRGeomBuilder().build( feature['geometry']) exc_wrap_int(OGR_F_SetGeometryDirectly(cogr_feature, cogr_geometry)) # OGR_F_SetFieldString takes encoded strings ('bytes' in Python 3). encoding = session._get_internal_encoding() for key, value in feature['properties'].items(): log.debug( "Looking up %s in %s", key, repr(session._schema_mapping)) ogr_key = session._schema_mapping[key] schema_type = normalize_field_type(collection.schema['properties'][key]) log.debug("Normalizing schema type for key %r in schema %r to %r", key, collection.schema['properties'], schema_type) try: key_bytes = ogr_key.encode(encoding) except UnicodeDecodeError: log.warning("Failed to encode %s using %s codec", key, encoding) key_bytes = ogr_key key_c = key_bytes i = OGR_F_GetFieldIndex(cogr_feature, key_c) if i < 0: continue # Special case: serialize dicts to assist OGR. if isinstance(value, dict): value = json.dumps(value) # Continue over the standard OGR types. 
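# Hedged sketch (all names hypothetical) of a record accepted by the type
# dispatch below, where "dst" is a collection opened in "w" mode; the
# dict-valued "meta" property would have been serialized by the
# json.dumps special case above:
#
#     record = {
#         "type": "Feature",
#         "geometry": {"type": "Point", "coordinates": (0.0, 0.0)},
#         "properties": {"name": "Main St", "lanes": 2, "length_km": 1.5,
#                        "meta": {"source": "survey"}},
#     }
#     dst.write(record)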
if isinstance(value, integer_types): log.debug("Setting field %r, type %r, to value %r", i, schema_type, value) if schema_type == 'int32': OGR_F_SetFieldInteger(cogr_feature, i, value) else: OGR_F_SetFieldInteger64(cogr_feature, i, value) elif isinstance(value, float): OGR_F_SetFieldDouble(cogr_feature, i, value) elif schema_type in ['date', 'time', 'datetime'] and value is not None: if isinstance(value, string_types): if schema_type == 'date': y, m, d, hh, mm, ss, ms, tz = parse_date(value) elif schema_type == 'time': y, m, d, hh, mm, ss, ms, tz = parse_time(value) else: y, m, d, hh, mm, ss, ms, tz = parse_datetime(value) elif (isinstance(value, datetime.date) and schema_type == 'date'): y, m, d = value.year, value.month, value.day hh = mm = ss = ms = 0 tz = None elif (isinstance(value, datetime.datetime) and schema_type == 'datetime'): y, m, d = value.year, value.month, value.day hh, mm, ss, ms = value.hour, value.minute, value.second, value.microsecond if value.utcoffset() is None: tz = None else: tz = value.utcoffset().total_seconds() / 60 elif (isinstance(value, datetime.time) and schema_type == 'time'): y = m = d = 0 hh, mm, ss, ms = value.hour, value.minute, value.second, value.microsecond if value.utcoffset() is None: tz = None else: tz = value.utcoffset().total_seconds() / 60 # Convert to UTC if driver does not support timezones if tz is not None and not _driver_supports_timezones(collection.driver, schema_type): if schema_type == 'datetime': d_tz = datetime.datetime(y, m, d, hh, mm, ss, int(ms), TZ(tz)) d_utc = d_tz - d_tz.utcoffset() y, m, d = d_utc.year, d_utc.month, d_utc.day hh, mm, ss, ms = d_utc.hour, d_utc.minute, d_utc.second, d_utc.microsecond tz = 0 del d_utc, d_tz elif schema_type == 'time': d_tz = datetime.datetime(1900, 1, 1, hh, mm, ss, int(ms), TZ(tz)) d_utc = d_tz - d_tz.utcoffset() y = m = d = 0 hh, mm, ss, ms = d_utc.hour, d_utc.minute, d_utc.second, d_utc.microsecond tz = 0 del d_utc, d_tz # tzinfo: (0=unknown, 100=GMT, 101=GMT+15minute, 99=GMT-15minute), or NULL if tz is not None: tzinfo = int(tz / 15.0 + 100) else: tzinfo = 0 # Add microseconds to seconds ss += ms / 10**6 set_field_datetime(cogr_feature, i, y, m, d, hh, mm, ss, tzinfo) elif isinstance(value, bytes) and schema_type == "bytes": string_c = value OGR_F_SetFieldBinary(cogr_feature, i, len(value), string_c) elif isinstance(value, string_types): try: value_bytes = value.encode(encoding) except UnicodeDecodeError: log.warning( "Failed to encode %s using %s codec", value, encoding) value_bytes = value string_c = value_bytes OGR_F_SetFieldString(cogr_feature, i, string_c) elif value is None: set_field_null(cogr_feature, i) else: raise ValueError("Invalid field type %s" % type(value)) log.debug("Set field %s: %r" % (key, value)) return cogr_feature cdef _deleteOgrFeature(void *cogr_feature): """Delete an OGR feature""" if cogr_feature is not NULL: OGR_F_Destroy(cogr_feature) cogr_feature = NULL def featureRT(feature, collection): # For testing purposes only, leaks the JSON data cdef void *cogr_feature = OGRFeatureBuilder().build(feature, collection) cdef void *cogr_geometry = OGR_F_GetGeometryRef(cogr_feature) if cogr_geometry == NULL: raise ValueError("Null geometry") result = FeatureBuilder().build( cogr_feature, encoding='utf-8', bbox=False, driver=collection.driver ) _deleteOgrFeature(cogr_feature) return result # Collection-related extension classes and functions cdef class Session: cdef void *cogr_ds cdef void *cogr_layer cdef object _fileencoding cdef object _encoding cdef object collection cdef 
bint cursor_interrupted def __init__(self): self.cogr_ds = NULL self.cogr_layer = NULL self._fileencoding = None self._encoding = None self.cursor_interrupted = False def __dealloc__(self): self.stop() def start(self, collection, **kwargs): cdef const char *path_c = NULL cdef const char *name_c = NULL cdef void *drv = NULL cdef void *ds = NULL cdef char **ignore_fields = NULL path_b = collection.path.encode('utf-8') path_c = path_b self._fileencoding = kwargs.get('encoding') or collection.encoding # We have two ways of specifying drivers to try. Resolve the # values into a single set of driver short names. if collection._driver: drivers = set([collection._driver]) elif collection.enabled_drivers: drivers = set(collection.enabled_drivers) else: drivers = None encoding = kwargs.pop('encoding', None) if encoding: kwargs['encoding'] = encoding.upper() self.cogr_ds = gdal_open_vector(path_c, 0, drivers, kwargs) if isinstance(collection.name, string_types): name_b = collection.name.encode('utf-8') name_c = name_b self.cogr_layer = GDALDatasetGetLayerByName(self.cogr_ds, name_c) elif isinstance(collection.name, int): self.cogr_layer = GDALDatasetGetLayer(self.cogr_ds, collection.name) name_c = OGR_L_GetName(self.cogr_layer) name_b = name_c collection.name = name_b.decode('utf-8') if self.cogr_layer == NULL: raise ValueError("Null layer: " + repr(collection.name)) encoding = self._get_internal_encoding() if collection.ignore_fields: if not OGR_L_TestCapability(self.cogr_layer, OLC_IGNOREFIELDS): raise DriverError("Driver does not support ignore_fields") try: for name in collection.ignore_fields: try: name_b = name.encode(encoding) except AttributeError: raise TypeError("Ignored field \"{}\" has type \"{}\", expected string".format(name, name.__class__.__name__)) ignore_fields = CSLAddString(ignore_fields, name_b) OGR_L_SetIgnoredFields(self.cogr_layer, ignore_fields) finally: CSLDestroy(ignore_fields) self.collection = collection cpdef stop(self): self.cogr_layer = NULL if self.cogr_ds != NULL: GDALClose(self.cogr_ds) self.cogr_ds = NULL def get_fileencoding(self): """DEPRECATED""" warnings.warn("get_fileencoding is deprecated and will be removed in a future version.", FionaDeprecationWarning) return self._fileencoding def _get_fallback_encoding(self): """Determine a format-specific fallback encoding to use when using OGR_F functions Parameters ---------- None Returns ------- str """ if "Shapefile" in self.get_driver(): return 'iso-8859-1' else: return locale.getpreferredencoding() def _get_internal_encoding(self): """Determine the encoding to use when using OGR_F functions Parameters ---------- None Returns ------- str Notes ----- If the layer implements RFC 23 support for UTF-8, the return value will be 'utf-8' and callers can be certain that this is correct. If the layer does not have the OLC_STRINGSASUTF8 capability marker, it is not possible to know exactly what the internal encoding is and this method returns best guesses. That means ISO-8859-1 for shapefiles and the locale's preferred encoding for other formats such as CSV files. 
""" if OGR_L_TestCapability(self.cogr_layer, OLC_STRINGSASUTF8): return 'utf-8' else: return self._fileencoding or self._get_fallback_encoding() def get_length(self): if self.cogr_layer == NULL: raise ValueError("Null layer") return self._get_feature_count(0) def get_driver(self): cdef void *cogr_driver = GDALGetDatasetDriver(self.cogr_ds) if cogr_driver == NULL: raise ValueError("Null driver") cdef const char *name = OGR_Dr_GetName(cogr_driver) driver_name = name return driver_name.decode() def get_schema(self): cdef int i cdef int n cdef void *cogr_featuredefn = NULL cdef void *cogr_fielddefn = NULL cdef const char *key_c props = [] if self.cogr_layer == NULL: raise ValueError("Null layer") if self.collection.ignore_fields: ignore_fields = self.collection.ignore_fields else: ignore_fields = set() cogr_featuredefn = OGR_L_GetLayerDefn(self.cogr_layer) if cogr_featuredefn == NULL: raise ValueError("Null feature definition") encoding = self._get_internal_encoding() n = OGR_FD_GetFieldCount(cogr_featuredefn) for i from 0 <= i < n: cogr_fielddefn = OGR_FD_GetFieldDefn(cogr_featuredefn, i) if cogr_fielddefn == NULL: raise ValueError("NULL field definition at index {}".format(i)) key_c = OGR_Fld_GetNameRef(cogr_fielddefn) if key_c == NULL: raise ValueError("NULL field name reference at index {}".format(i)) key_b = key_c key = key_b.decode(encoding) if not key: warnings.warn("Empty field name at index {}".format(i), FeatureWarning) if key in ignore_fields: log.debug("By request, ignoring field %r", key) continue fieldtypename = FIELD_TYPES[OGR_Fld_GetType(cogr_fielddefn)] if not fieldtypename: log.warning( "Skipping field %s: invalid type %s", key, OGR_Fld_GetType(cogr_fielddefn)) continue val = fieldtypename if fieldtypename == 'float': fmt = "" width = OGR_Fld_GetWidth(cogr_fielddefn) if width: # and width != 24: fmt = ":%d" % width precision = OGR_Fld_GetPrecision(cogr_fielddefn) if precision: # and precision != 15: fmt += ".%d" % precision val = "float" + fmt elif fieldtypename in ('int32', 'int64'): fmt = "" width = OGR_Fld_GetWidth(cogr_fielddefn) if width: fmt = ":%d" % width val = 'int' + fmt elif fieldtypename == 'str': fmt = "" width = OGR_Fld_GetWidth(cogr_fielddefn) if width: fmt = ":%d" % width val = fieldtypename + fmt props.append((key, val)) ret = {"properties": OrderedDict(props)} if not self.collection.ignore_geometry: code = normalize_geometry_type_code( OGR_FD_GetGeomType(cogr_featuredefn)) ret["geometry"] = GEOMETRY_TYPES[code] return ret def get_crs(self): """Get the layer's CRS Returns ------- CRS """ cdef char *proj_c = NULL cdef const char *auth_key = NULL cdef const char *auth_val = NULL cdef void *cogr_crs = NULL if self.cogr_layer == NULL: raise ValueError("Null layer") try: cogr_crs = exc_wrap_pointer(OGR_L_GetSpatialRef(self.cogr_layer)) # TODO: we don't intend to use try/except for flow control # this is a work around for a GDAL issue. 
except FionaNullPointerError: log.debug("Layer has no coordinate system") if cogr_crs is not NULL: log.debug("Got coordinate system") crs = {} try: retval = OSRAutoIdentifyEPSG(cogr_crs) if retval > 0: log.info("Failed to auto identify EPSG: %d", retval) try: auth_key = exc_wrap_pointer(OSRGetAuthorityName(cogr_crs, NULL)) auth_val = exc_wrap_pointer(OSRGetAuthorityCode(cogr_crs, NULL)) except CPLE_BaseError as exc: log.debug("{}".format(exc)) if auth_key != NULL and auth_val != NULL: key_b = auth_key key = key_b.decode('utf-8') if key == 'EPSG': val_b = auth_val val = val_b.decode('utf-8') crs['init'] = "epsg:" + val else: OSRExportToProj4(cogr_crs, &proj_c) if proj_c == NULL: raise ValueError("Null projection") proj_b = proj_c log.debug("Params: %s", proj_b) value = proj_b.decode() value = value.strip() for param in value.split(): kv = param.split("=") if len(kv) == 2: k, v = kv try: v = float(v) if v % 1 == 0: v = int(v) except ValueError: # Leave v as a string pass elif len(kv) == 1: k, v = kv[0], True else: raise ValueError("Unexpected proj parameter %s" % param) k = k.lstrip("+") crs[k] = v finally: CPLFree(proj_c) return crs else: log.debug("Projection not found (cogr_crs was NULL)") return {} def get_crs_wkt(self): cdef char *proj_c = NULL cdef void *cogr_crs = NULL if self.cogr_layer == NULL: raise ValueError("Null layer") try: cogr_crs = exc_wrap_pointer(OGR_L_GetSpatialRef(self.cogr_layer)) # TODO: we don't intend to use try/except for flow control # this is a work around for a GDAL issue. except FionaNullPointerError: log.debug("Layer has no coordinate system") except fiona._err.CPLE_OpenFailedError as exc: log.debug("A support file wasn't opened. See the preceding ERROR level message.") cogr_crs = OGR_L_GetSpatialRef(self.cogr_layer) log.debug("Called OGR_L_GetSpatialRef() again without error checking.") if cogr_crs == NULL: raise exc if cogr_crs is not NULL: log.debug("Got coordinate system") try: OSRExportToWkt(cogr_crs, &proj_c) if proj_c == NULL: raise ValueError("Null projection") proj_b = proj_c crs_wkt = proj_b.decode('utf-8') finally: CPLFree(proj_c) return crs_wkt else: log.debug("Projection not found (cogr_crs was NULL)") return "" def get_extent(self): cdef OGREnvelope extent if self.cogr_layer == NULL: raise ValueError("Null layer") result = OGR_L_GetExtent(self.cogr_layer, &extent, 1) self.cursor_interrupted = True if result != OGRERR_NONE: raise DriverError("Driver was not able to calculate bounds") return (extent.MinX, extent.MinY, extent.MaxX, extent.MaxY) cdef int _get_feature_count(self, force=0): if self.cogr_layer == NULL: raise ValueError("Null layer") self.cursor_interrupted = True return OGR_L_GetFeatureCount(self.cogr_layer, force) def has_feature(self, fid): """Provides access to feature data by FID. Supports Collection.__contains__(). """ cdef void * cogr_feature fid = int(fid) cogr_feature = OGR_L_GetFeature(self.cogr_layer, fid) if cogr_feature != NULL: _deleteOgrFeature(cogr_feature) return True else: return False def get_feature(self, fid): """Provides access to feature data by FID. Supports Collection.__contains__(). 
""" cdef void * cogr_feature fid = int(fid) cogr_feature = OGR_L_GetFeature(self.cogr_layer, fid) if cogr_feature != NULL: feature = FeatureBuilder().build( cogr_feature, encoding=self._get_internal_encoding(), bbox=False, driver=self.collection.driver, ignore_fields=self.collection.ignore_fields, ignore_geometry=self.collection.ignore_geometry, ) _deleteOgrFeature(cogr_feature) return feature else: raise KeyError("There is no feature with fid {!r}".format(fid)) get = get_feature # TODO: Make this an alias for get_feature in a future version. def __getitem__(self, item): cdef void * cogr_feature if isinstance(item, slice): warnings.warn("Collection slicing is deprecated and will be disabled in a future version.", FionaDeprecationWarning) itr = Iterator(self.collection, item.start, item.stop, item.step) log.debug("Slice: %r", item) return list(itr) elif isinstance(item, int): index = item # from the back if index < 0: ftcount = self._get_feature_count(0) if ftcount == -1: raise IndexError( "collection's dataset does not support negative indexes") index += ftcount cogr_feature = OGR_L_GetFeature(self.cogr_layer, index) if cogr_feature == NULL: return None feature = FeatureBuilder().build( cogr_feature, encoding=self._get_internal_encoding(), bbox=False, driver=self.collection.driver, ignore_fields=self.collection.ignore_fields, ignore_geometry=self.collection.ignore_geometry, ) _deleteOgrFeature(cogr_feature) return feature def isactive(self): if self.cogr_layer != NULL and self.cogr_ds != NULL: return 1 else: return 0 cdef class WritingSession(Session): cdef object _schema_mapping def start(self, collection, **kwargs): cdef OGRSpatialReferenceH cogr_srs = NULL cdef char **options = NULL cdef const char *path_c = NULL cdef const char *driver_c = NULL cdef const char *name_c = NULL cdef const char *proj_c = NULL cdef const char *fileencoding_c = NULL cdef OGRFieldSubType field_subtype cdef int ret path = collection.path self.collection = collection userencoding = kwargs.get('encoding') if collection.mode == 'a': if not os.path.exists(path): raise OSError("No such file or directory %s" % path) try: path_b = path.encode('utf-8') except UnicodeDecodeError: path_b = path path_c = path_b try: self.cogr_ds = gdal_open_vector(path_c, 1, None, kwargs) if isinstance(collection.name, string_types): name_b = collection.name.encode('utf-8') name_c = name_b self.cogr_layer = exc_wrap_pointer(GDALDatasetGetLayerByName(self.cogr_ds, name_c)) elif isinstance(collection.name, int): self.cogr_layer = exc_wrap_pointer(GDALDatasetGetLayer(self.cogr_ds, collection.name)) except CPLE_BaseError as exc: GDALClose(self.cogr_ds) self.cogr_ds = NULL self.cogr_layer = NULL raise DriverError(u"{}".format(exc)) else: self._fileencoding = userencoding or self._get_fallback_encoding() before_fields = self.get_schema()['properties'] elif collection.mode == 'w': try: path_b = path.encode('utf-8') except UnicodeDecodeError: path_b = path path_c = path_b driver_b = collection.driver.encode() driver_c = driver_b cogr_driver = exc_wrap_pointer(GDALGetDriverByName(driver_c)) if not CPLCheckForFile(path_c, NULL): log.debug("File doesn't exist. Creating a new one...") with Env(GDAL_VALIDATE_CREATION_OPTIONS="NO"): cogr_ds = gdal_create(cogr_driver, path_c, kwargs) else: if collection.driver == "GeoJSON": # We must manually remove geojson files as GDAL doesn't do this for us. 
log.debug("Removing GeoJSON file") if path.startswith("/vsi"): VSIUnlink(path_c) else: os.unlink(path) with Env(GDAL_VALIDATE_CREATION_OPTIONS="NO"): cogr_ds = gdal_create(cogr_driver, path_c, kwargs) else: try: # Attempt to open existing dataset in write mode, # letting GDAL/OGR handle the overwriting. cogr_ds = gdal_open_vector(path_c, 1, None, kwargs) except DriverError: # log.exception("Caught DriverError") # failed, attempt to create it with Env(GDAL_VALIDATE_CREATION_OPTIONS="NO"): cogr_ds = gdal_create(cogr_driver, path_c, kwargs) else: # check capability of creating a new layer in the existing dataset capability = check_capability_create_layer(cogr_ds) if not capability or collection.name is None: # unable to use existing dataset, recreate it log.debug("Unable to use existing dataset: capability=%r, name=%r", capability, collection.name) GDALClose(cogr_ds) cogr_ds = NULL with Env(GDAL_VALIDATE_CREATION_OPTIONS="NO"): cogr_ds = gdal_create(cogr_driver, path_c, kwargs) self.cogr_ds = cogr_ds # Set the spatial reference system from the crs given to the # collection constructor. We by-pass the crs_wkt # properties because they aren't accessible until the layer # is constructed (later). try: col_crs = collection._crs_wkt if col_crs: cogr_srs = exc_wrap_pointer(OSRNewSpatialReference(NULL)) proj_b = col_crs.encode('utf-8') proj_c = proj_b OSRSetFromUserInput(cogr_srs, proj_c) osr_set_traditional_axis_mapping_strategy(cogr_srs) except CPLE_BaseError as exc: GDALClose(self.cogr_ds) self.cogr_ds = NULL self.cogr_layer = NULL raise CRSError(u"{}".format(exc)) # Determine which encoding to use. The encoding parameter given to # the collection constructor takes highest precedence, then # 'iso-8859-1' (for shapefiles), then the system's default encoding # as last resort. sysencoding = locale.getpreferredencoding() self._fileencoding = userencoding or ("Shapefile" in collection.driver and 'iso-8859-1') or sysencoding if "Shapefile" in collection.driver: if self._fileencoding: fileencoding_b = self._fileencoding.upper().encode('utf-8') fileencoding_c = fileencoding_b options = CSLSetNameValue(options, "ENCODING", fileencoding_c) # Does the layer exist already? If so, we delete it. layer_count = GDALDatasetGetLayerCount(self.cogr_ds) layer_names = [] for i in range(layer_count): cogr_layer = GDALDatasetGetLayer(cogr_ds, i) name_c = OGR_L_GetName(cogr_layer) name_b = name_c layer_names.append(name_b.decode('utf-8')) idx = -1 if isinstance(collection.name, string_types): if collection.name in layer_names: idx = layer_names.index(collection.name) elif isinstance(collection.name, int): if collection.name >= 0 and collection.name < layer_count: idx = collection.name if idx >= 0: log.debug("Deleted pre-existing layer at %s", collection.name) GDALDatasetDeleteLayer(self.cogr_ds, idx) # Create the named layer in the datasource. name_b = collection.name.encode('utf-8') name_c = name_b for k, v in kwargs.items(): if v is None: continue # We need to remove encoding from the layer creation # options if we're not creating a shapefile. 
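# Illustrative, hedged example of the conversion below: a keyword such as
# SPATIAL_INDEX=True passed through fiona.open(..., mode="w") for a
# GeoPackage becomes the OGR layer creation option "SPATIAL_INDEX=ON";
# False becomes "OFF", and any other value is stringified.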
if k == 'encoding' and "Shapefile" not in collection.driver: continue k = k.upper().encode('utf-8') if isinstance(v, bool): v = ('ON' if v else 'OFF').encode('utf-8') else: v = str(v).encode('utf-8') log.debug("Set option %r: %r", k, v) options = CSLAddNameValue(options, k, v) geometry_type = collection.schema.get("geometry", "Unknown") if not isinstance(geometry_type, string_types) and geometry_type is not None: geometry_types = set(geometry_type) if len(geometry_types) > 1: geometry_type = "Unknown" else: geometry_type = geometry_types.pop() if geometry_type == "Any" or geometry_type is None: geometry_type = "Unknown" geometry_code = geometry_type_code(geometry_type) try: self.cogr_layer = exc_wrap_pointer( GDALDatasetCreateLayer( self.cogr_ds, name_c, cogr_srs, geometry_code, options)) except Exception as exc: GDALClose(self.cogr_ds) self.cogr_ds = NULL raise DriverIOError(u"{}".format(exc)) finally: if options != NULL: CSLDestroy(options) # Shapefile layers make a copy of the passed srs. GPKG # layers, on the other hand, increment its reference # count. OSRRelease() is the safe way to release # OGRSpatialReferenceH. if cogr_srs != NULL: OSRRelease(cogr_srs) log.debug("Created layer %s", collection.name) # Next, make a layer definition from the given schema properties, # which are an ordered dict since Fiona 1.0.1. encoding = self._get_internal_encoding() # Test if default fields are included in provided schema schema_fields = collection.schema['properties'] default_fields = self.get_schema()['properties'] for key, value in default_fields.items(): if key in schema_fields and not schema_fields[key] == value: raise SchemaError("Property '{}' must have type '{}' " "for driver '{}'".format(key, value, self.collection.driver)) new_fields = OrderedDict([(key, value) for key, value in schema_fields.items() if key not in default_fields]) before_fields = default_fields.copy() before_fields.update(new_fields) for key, value in new_fields.items(): log.debug("Begin creating field: %r value: %r", key, value) field_subtype = OFSTNone # Convert 'long' to 'int'. See # https://github.com/Toblerity/Fiona/issues/101. if fiona.gdal_version.major >= 2 and value in ('int', 'long'): value = 'int64' elif value == 'int': value = 'int32' if value == 'bool': value = 'int32' field_subtype = OFSTBoolean # Is there a field width/precision? width = precision = None if ':' in value: value, fmt = value.split(':') log.debug("Field format parsing, value: %r, fmt: %r", value, fmt) if '.' in fmt: width, precision = map(int, fmt.split('.')) else: width = int(fmt) if value == 'int': if GDAL_VERSION_NUM >= 2000000 and (width == 0 or width >= 10): value = 'int64' else: value = 'int32' field_type = FIELD_TYPES.index(value) try: key_bytes = key.encode(encoding) cogr_fielddefn = exc_wrap_pointer(OGR_Fld_Create(key_bytes, field_type)) if width: OGR_Fld_SetWidth(cogr_fielddefn, width) if precision: OGR_Fld_SetPrecision(cogr_fielddefn, precision) if field_subtype != OFSTNone: # subtypes are new in GDAL 2.x, ignored in 1.x set_field_subtype(cogr_fielddefn, field_subtype) exc_wrap_int(OGR_L_CreateField(self.cogr_layer, cogr_fielddefn, 1)) except (UnicodeEncodeError, CPLE_BaseError) as exc: GDALClose(self.cogr_ds) self.cogr_ds = NULL self.cogr_layer = NULL raise SchemaError(u"{}".format(exc)) else: OGR_Fld_Destroy(cogr_fielddefn) log.debug("End creating field %r", key) # Mapping of the Python collection schema to the munged # OGR schema. 
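# Hedged recap of the schema strings parsed above (not exhaustive):
# "int:10" creates a 10-digit integer field (promoted to 'int64' on
# GDAL 2+ when the width is 0 or >= 10), "float:24.15" an OGR Real with
# width 24 and precision 15, and "str:80" an 80-character string field.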
after_fields = self.get_schema()['properties'] self._schema_mapping = dict(zip(before_fields.keys(), after_fields.keys())) log.debug("Writing started") def writerecs(self, records, collection): """Writes buffered records to OGR.""" cdef void *cogr_driver cdef void *cogr_feature cdef int features_in_transaction = 0 cdef void *cogr_layer = self.cogr_layer if cogr_layer == NULL: raise ValueError("Null layer") schema_geom_type = collection.schema['geometry'] cogr_driver = GDALGetDatasetDriver(self.cogr_ds) driver_name = OGR_Dr_GetName(cogr_driver).decode("utf-8") valid_geom_types = collection._valid_geom_types def validate_geometry_type(record): if record["geometry"] is None: return True return record["geometry"]["type"].lstrip("3D ") in valid_geom_types transactions_supported = check_capability_transaction(self.cogr_ds) log.debug("Transaction supported: {}".format(transactions_supported)) if transactions_supported: log.debug("Starting transaction (initial)") result = gdal_start_transaction(self.cogr_ds, 0) if result == OGRERR_FAILURE: raise TransactionError("Failed to start transaction") schema_props_keys = set(collection.schema['properties'].keys()) for record in records: log.debug("Creating feature in layer: %s" % record) # Check for optional elements if 'properties' not in record: record['properties'] = {} if 'geometry' not in record: record['geometry'] = None # Validate against collection's schema. if set(record['properties'].keys()) != schema_props_keys: raise ValueError( "Record does not match collection schema: %r != %r" % ( record['properties'].keys(), list(schema_props_keys) )) if not validate_geometry_type(record): raise GeometryTypeValidationError( "Record's geometry type does not match " "collection schema's geometry type: %r != %r" % ( record['geometry']['type'], collection.schema['geometry'] )) cogr_feature = OGRFeatureBuilder().build(record, collection) result = OGR_L_CreateFeature(cogr_layer, cogr_feature) if result != OGRERR_NONE: msg = get_last_error_msg() raise RuntimeError("GDAL Error: {msg} \n \n Failed to write record: " "{record}".format(msg=msg, record=record)) _deleteOgrFeature(cogr_feature) if transactions_supported: features_in_transaction += 1 if features_in_transaction == DEFAULT_TRANSACTION_SIZE: log.debug("Committing transaction (intermediate)") result = gdal_commit_transaction(self.cogr_ds) if result == OGRERR_FAILURE: raise TransactionError("Failed to commit transaction") log.debug("Starting transaction (intermediate)") result = gdal_start_transaction(self.cogr_ds, 0) if result == OGRERR_FAILURE: raise TransactionError("Failed to start transaction") features_in_transaction = 0 if transactions_supported: log.debug("Committing transaction (final)") result = gdal_commit_transaction(self.cogr_ds) if result == OGRERR_FAILURE: raise TransactionError("Failed to commit transaction") def sync(self, collection): """Syncs OGR to disk.""" cdef void *cogr_ds = self.cogr_ds cdef void *cogr_layer = self.cogr_layer if cogr_ds == NULL: raise ValueError("Null data source") gdal_flush_cache(cogr_ds) log.debug("Flushed data source cache") cdef class Iterator: """Provides iterated access to feature data. 
""" # Reference to its Collection cdef collection cdef encoding cdef int next_index cdef stop cdef start cdef step cdef fastindex cdef fastcount cdef ftcount cdef stepsign def __cinit__(self, collection, start=None, stop=None, step=None, bbox=None, mask=None): if collection.session is None: raise ValueError("I/O operation on closed collection") self.collection = collection cdef Session session cdef void *cogr_geometry session = self.collection.session cdef void *cogr_layer = session.cogr_layer if cogr_layer == NULL: raise ValueError("Null layer") OGR_L_ResetReading(cogr_layer) if bbox and mask: raise ValueError("mask and bbox can not be set together") if bbox: OGR_L_SetSpatialFilterRect( cogr_layer, bbox[0], bbox[1], bbox[2], bbox[3]) elif mask: cogr_geometry = OGRGeomBuilder().build(mask) OGR_L_SetSpatialFilter(cogr_layer, cogr_geometry) OGR_G_DestroyGeometry(cogr_geometry) else: OGR_L_SetSpatialFilter(cogr_layer, NULL) self.encoding = session._get_internal_encoding() self.fastindex = OGR_L_TestCapability( session.cogr_layer, OLC_FASTSETNEXTBYINDEX) log.debug("OLC_FASTSETNEXTBYINDEX: {}".format(self.fastindex)) self.fastcount = OGR_L_TestCapability( session.cogr_layer, OLC_FASTFEATURECOUNT) log.debug("OLC_FASTFEATURECOUNT: {}".format(self.fastcount)) # In some cases we need to force count of all features # We need to check if start is not greater ftcount: (start is not None and start > 0) # If start is a negative index: (start is not None and start < 0) # If stop is a negative index: (stop is not None and stop < 0) if ((start is not None and not start == 0) or (stop is not None and stop < 0)): if not self.fastcount: warnings.warn("Layer does not support" \ " OLC_FASTFEATURECOUNT, negative slices or start values other than zero" \ " may be slow.", RuntimeWarning) self.ftcount = session._get_feature_count(1) else: self.ftcount = session._get_feature_count(0) if self.ftcount == -1 and ((start is not None and start < 0) or (stop is not None and stop < 0)): raise IndexError( "collection's dataset does not support negative slice indexes") if stop is not None and stop < 0: stop += self.ftcount if start is None: start = 0 if start is not None and start < 0: start += self.ftcount # step size if step is None: step = 1 if step == 0: raise ValueError("slice step cannot be zero") if step < 0 and not self.fastindex: warnings.warn("Layer does not support" \ " OLCFastSetNextByIndex, negative step size may" \ " be slow.", RuntimeWarning) # Check if we are outside of the range: if not self.ftcount == -1: if start > self.ftcount and step > 0: start = -1 if start > self.ftcount and step < 0: start = self.ftcount - 1 elif self.ftcount == -1 and not start == 0: warnings.warn("Layer is unable to check if slice is within range of data.", RuntimeWarning) self.stepsign = int(math.copysign(1, step)) self.stop = stop self.start = start self.step = step self.next_index = start log.debug("Next index: %d", self.next_index) # Set OGR_L_SetNextByIndex only if within range if start >= 0 and (self.ftcount == -1 or self.start < self.ftcount): exc_wrap_int(OGR_L_SetNextByIndex(session.cogr_layer, self.next_index)) session.cursor_interrupted = False def __iter__(self): return self def _next(self): """Internal method to set read cursor to next item""" cdef Session session session = self.collection.session # Check if next_index is valid if self.next_index < 0: raise StopIteration # GeoJSON driver with gdal 2.1 - 2.2 returns last feature # if index greater than number of features if self.ftcount >= 0 and self.next_index >= 
self.ftcount: raise StopIteration if self.stepsign == 1: if self.next_index < self.start or (self.stop is not None and self.next_index >= self.stop): raise StopIteration else: if self.next_index > self.start or (self.stop is not None and self.next_index <= self.stop): raise StopIteration # Set read cursor to next_item position if session.cursor_interrupted: if not self.fastindex and not self.next_index == 0: warnings.warn("Sequential read of iterator was interrupted. Resetting iterator. " "This can negatively impact performance.", RuntimeWarning) exc_wrap_int(OGR_L_SetNextByIndex(session.cogr_layer, self.next_index)) session.cursor_interrupted = False else: if self.step > 1 and self.fastindex: exc_wrap_int(OGR_L_SetNextByIndex(session.cogr_layer, self.next_index)) elif self.step > 1 and not self.fastindex and not self.next_index == self.start: # GDAL's default implementation of SetNextByIndex calls ResetReading() and then # GetNextFeature n times. We can shortcut that if we know the previous index. # OGR_L_GetNextFeature increments the cursor by 1, hence self.step - 1 here, as one increment was already performed when the feature was read for _ in range(self.step - 1): cogr_feature = OGR_L_GetNextFeature(session.cogr_layer) if cogr_feature == NULL: raise StopIteration elif self.step > 1 and not self.fastindex and self.next_index == self.start: exc_wrap_int(OGR_L_SetNextByIndex(session.cogr_layer, self.next_index)) elif self.step < 0: exc_wrap_int(OGR_L_SetNextByIndex(session.cogr_layer, self.next_index)) # set the next index self.next_index += self.step log.debug("Next index: %d", self.next_index) def __next__(self): cdef OGRFeatureH cogr_feature = NULL cdef OGRLayerH cogr_layer = NULL cdef Session session session = self.collection.session if not session or not session.isactive: raise FionaValueError("Session is inactive, dataset is closed or layer is unavailable.") # Update read cursor self._next() # Get the next feature. cogr_feature = OGR_L_GetNextFeature(session.cogr_layer) if cogr_feature == NULL: raise StopIteration try: return FeatureBuilder().build( cogr_feature, encoding=self.collection.session._get_internal_encoding(), bbox=False, driver=self.collection.driver, ignore_fields=self.collection.ignore_fields, ignore_geometry=self.collection.ignore_geometry, ) finally: _deleteOgrFeature(cogr_feature) cdef class ItemsIterator(Iterator): def __next__(self): cdef long fid cdef void * cogr_feature cdef Session session session = self.collection.session # Update read cursor self._next() # Get the next feature. cogr_feature = OGR_L_GetNextFeature(session.cogr_layer) if cogr_feature == NULL: raise StopIteration fid = OGR_F_GetFID(cogr_feature) feature = FeatureBuilder().build( cogr_feature, encoding=self.collection.session._get_internal_encoding(), bbox=False, driver=self.collection.driver, ignore_fields=self.collection.ignore_fields, ignore_geometry=self.collection.ignore_geometry, ) _deleteOgrFeature(cogr_feature) return fid, feature cdef class KeysIterator(Iterator): def __next__(self): cdef long fid cdef void * cogr_feature cdef Session session session = self.collection.session # Update read cursor self._next() # Get the next feature. 
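# Usage sketch (dataset path hypothetical): KeysIterator backs
# Collection.keys(), yielding bare FIDs without building full features:
#
#     with fiona.open("roads.shp") as src:
#         fids = list(src.keys())  # e.g. [0, 1, 2]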
cogr_feature = OGR_L_GetNextFeature(session.cogr_layer) if cogr_feature == NULL: raise StopIteration fid = OGR_F_GetFID(cogr_feature) _deleteOgrFeature(cogr_feature) return fid def _remove(path, driver=None): """Deletes an OGR data source """ cdef void *cogr_driver cdef void *cogr_ds cdef int result cdef char *driver_c if driver is None: # attempt to identify the driver by opening the dataset try: cogr_ds = gdal_open_vector(path.encode("utf-8"), 0, None, {}) except (DriverError, FionaNullPointerError): raise DatasetDeleteError("Failed to remove data source {}".format(path)) cogr_driver = GDALGetDatasetDriver(cogr_ds) GDALClose(cogr_ds) else: cogr_driver = GDALGetDriverByName(driver.encode("utf-8")) if cogr_driver == NULL: raise DatasetDeleteError("Null driver when attempting to delete {}".format(path)) if not OGR_Dr_TestCapability(cogr_driver, ODrCDeleteDataSource): raise DatasetDeleteError("Driver does not support dataset removal operation") result = GDALDeleteDataset(cogr_driver, path.encode('utf-8')) if result != OGRERR_NONE: raise DatasetDeleteError("Failed to remove data source {}".format(path)) def _remove_layer(path, layer, driver=None): cdef void *cogr_ds cdef int layer_index if isinstance(layer, integer_types): layer_index = layer layer_str = str(layer_index) else: layer_names = _listlayers(path) try: layer_index = layer_names.index(layer) except ValueError: raise ValueError("Layer \"{}\" does not exist in datasource: {}".format(layer, path)) layer_str = '"{}"'.format(layer) if layer_index < 0: layer_names = _listlayers(path) layer_index = len(layer_names) + layer_index try: cogr_ds = gdal_open_vector(path.encode("utf-8"), 1, None, {}) except (DriverError, FionaNullPointerError): raise DatasetDeleteError("Failed to remove data source {}".format(path)) result = GDALDatasetDeleteLayer(cogr_ds, layer_index) GDALClose(cogr_ds) if result == OGRERR_UNSUPPORTED_OPERATION: raise DatasetDeleteError("Removal of layer {} not supported by driver".format(layer_str)) elif result != OGRERR_NONE: raise DatasetDeleteError("Failed to remove layer {} from datasource: {}".format(layer_str, path)) def _listlayers(path, **kwargs): """Provides a list of the layers in an OGR data source. """ cdef void *cogr_ds = NULL cdef void *cogr_layer = NULL cdef const char *path_c cdef const char *name_c # Open OGR data source. try: path_b = path.encode('utf-8') except UnicodeDecodeError: path_b = path path_c = path_b cogr_ds = gdal_open_vector(path_c, 0, None, kwargs) # Loop over the layers to get their names. layer_count = GDALDatasetGetLayerCount(cogr_ds) layer_names = [] for i in range(layer_count): cogr_layer = GDALDatasetGetLayer(cogr_ds, i) name_c = OGR_L_GetName(cogr_layer) name_b = name_c layer_names.append(name_b.decode('utf-8')) # Close up data source. if cogr_ds != NULL: GDALClose(cogr_ds) cogr_ds = NULL return layer_names def buffer_to_virtual_file(bytesbuf, ext=''): """Maps a bytes buffer to a virtual file. `ext` is empty or begins with a period and contains at most one period. 
""" vsi_filename = '/vsimem/{}'.format(uuid4().hex + ext) vsi_cfilename = vsi_filename if not isinstance(vsi_filename, string_types) else vsi_filename.encode('utf-8') vsi_handle = VSIFileFromMemBuffer(vsi_cfilename, bytesbuf, len(bytesbuf), 0) if vsi_handle == NULL: raise OSError('failed to map buffer to file') if VSIFCloseL(vsi_handle) != 0: raise OSError('failed to close mapped file handle') return vsi_filename def remove_virtual_file(vsi_filename): vsi_cfilename = vsi_filename if not isinstance(vsi_filename, string_types) else vsi_filename.encode('utf-8') return VSIUnlink(vsi_cfilename) cdef class MemoryFileBase: """Base for a BytesIO-like class backed by an in-memory file.""" cdef VSILFILE * _vsif def __init__(self, file_or_bytes=None, dirname=None, filename=None, ext=''): """A file in an in-memory filesystem. Parameters ---------- file_or_bytes : file or bytes A file opened in binary mode or bytes filename : str A filename for the in-memory file under /vsimem ext : str A file extension for the in-memory file under /vsimem. Ignored if filename was provided. """ if file_or_bytes: if hasattr(file_or_bytes, 'read'): initial_bytes = file_or_bytes.read() elif isinstance(file_or_bytes, bytes): initial_bytes = file_or_bytes else: raise TypeError( "Constructor argument must be a file opened in binary " "mode or bytes.") else: initial_bytes = b'' # Make an in-memory directory specific to this dataset to help organize # auxiliary files. self._dirname = dirname or str(uuid4().hex) VSIMkdir("/vsimem/{0}".format(self._dirname).encode("utf-8"), 0666) if filename: # GDAL's SRTMHGT driver requires the filename to be "correct" (match # the bounds being written) self.name = "/vsimem/{0}/{1}".format(self._dirname, filename) else: # GDAL 2.1 requires a .zip extension for zipped files. self.name = "/vsimem/{0}/{0}{1}".format(self._dirname, ext) name_b = self.name.encode('utf-8') self._initial_bytes = initial_bytes cdef unsigned char *buffer = self._initial_bytes if self._initial_bytes: self._vsif = VSIFileFromMemBuffer( name_b, buffer, len(self._initial_bytes), 0) self.mode = "r" else: self._vsif = NULL self.mode = "r+" self.closed = False def _open(self): """Ensure that the instance has a valid VSI file handle.""" cdef VSILFILE *fp = NULL name_b = self.name.encode('utf-8') if not self.exists(): fp = VSIFOpenL(name_b, "w") if fp == NULL: raise OSError("VSIFOpenL failed") else: VSIFCloseL(fp) self._vsif = NULL if self._vsif == NULL: fp = VSIFOpenL(name_b, self.mode.encode("utf-8")) if fp == NULL: log.error("VSIFOpenL failed: name=%r, mode=%r", self.name, self.mode) raise OSError("VSIFOpenL failed") else: self._vsif = fp def _ensure_extension(self, drivername=None): """Ensure that the instance's name uses a file extension supported by the driver.""" # Avoid a crashing bug with GDAL versions < 2. if get_gdal_version_tuple() < (2, ): return name_b = drivername.encode("utf-8") cdef const char *name_c = name_b cdef GDALDriverH driver = GDALGetDriverByName(name_c) cdef const char *extension_c = GDALGetMetadataItem(driver, "DMD_EXTENSION", NULL) if extension_c != NULL: extension_b = extension_c recommended_extension = extension_b.decode("utf-8") if not recommended_extension.startswith("."): recommended_extension = "." + recommended_extension root, ext = os.path.splitext(self.name) if not ext: log.info("Setting extension: root=%r, extension=%r", root, recommended_extension) self.name = root + recommended_extension def exists(self): """Test if the in-memory file exists. 
Returns ------- bool True if the in-memory file exists. """ cdef VSIStatBufL st_buf name_b = self.name.encode('utf-8') return VSIStatL(name_b, &st_buf) == 0 def __len__(self): """Length of the file's buffer in number of bytes. Returns ------- int """ if not self.getbuffer(): return 0 return self.getbuffer().size def getbuffer(self): """Return a view on bytes of the file, or None.""" cdef unsigned char *buffer = NULL cdef vsi_l_offset buffer_len = 0 cdef unsigned char [:] buff_view name_b = self.name.encode('utf-8') buffer = VSIGetMemFileBuffer(name_b, &buffer_len, 0) if buffer == NULL or buffer_len == 0: return None else: buff_view = buffer return buff_view def close(self): """Close and tear down VSI file and directory.""" if self._vsif != NULL: VSIFCloseL(self._vsif) self._vsif = NULL # As soon as support for GDAL < 3 is dropped, we can switch # to VSIRmdirRecursive. VSIUnlink(self.name.encode("utf-8")) VSIRmdir(self._dirname.encode("utf-8")) self.closed = True def seek(self, offset, whence=0): self._open() return VSIFSeekL(self._vsif, offset, whence) def tell(self): self._open() if self._vsif != NULL: return VSIFTellL(self._vsif) else: return 0 def read(self, size=-1): """Read size bytes from MemoryFile.""" cdef bytes result cdef unsigned char *buffer = NULL cdef vsi_l_offset buffer_len = 0 if size < 0: name_b = self.name.encode('utf-8') buffer = VSIGetMemFileBuffer(name_b, &buffer_len, 0) size = buffer_len buffer = CPLMalloc(size) self._open() try: objects_read = VSIFReadL(buffer, 1, size, self._vsif) result = buffer[:objects_read] return result finally: CPLFree(buffer) return result def write(self, data): """Write data bytes to MemoryFile""" cdef const unsigned char *view = data n = len(data) self._open() result = VSIFWriteL(view, 1, n, self._vsif) VSIFFlushL(self._vsif) return result Fiona-1.8.21/fiona/ogrext1.pxd000066400000000000000000000255551420023252700160520ustar00rootroot00000000000000# Copyright (c) 2007, Sean C. Gillies # All rights reserved. 
# See ../LICENSE.txt from libc.stdio cimport FILE cdef extern from "gdal.h": ctypedef void * GDALDriverH ctypedef void * GDALMajorObjectH const char* GDALGetMetadataItem(GDALMajorObjectH obj, const char *pszName, const char *pszDomain) char * GDALVersionInfo (char *pszRequest) cdef extern from "gdal_version.h": int GDAL_COMPUTE_VERSION(int maj, int min, int rev) cdef extern from "cpl_conv.h": void * CPLMalloc (size_t) void CPLFree (void *ptr) void CPLSetThreadLocalConfigOption (char *key, char *val) void CPLSetConfigOption (char *key, char *val) const char *CPLGetConfigOption (char *, char *) int CPLCheckForFile(char *, char **) cdef extern from "cpl_string.h": char ** CSLAddNameValue (char **list, char *name, char *value) char ** CSLSetNameValue (char **list, char *name, char *value) void CSLDestroy (char **list) char ** CSLAddString(char **list, const char *string) cdef extern from "sys/stat.h" nogil: struct stat: pass cdef extern from "cpl_vsi.h" nogil: ctypedef int vsi_l_offset ctypedef FILE VSILFILE ctypedef stat VSIStatBufL unsigned char *VSIGetMemFileBuffer(const char *path, vsi_l_offset *data_len, int take_ownership) VSILFILE *VSIFileFromMemBuffer(const char *path, void *data, vsi_l_offset data_len, int take_ownership) VSILFILE* VSIFOpenL(const char *path, const char *mode) int VSIFCloseL(VSILFILE *fp) int VSIUnlink(const char *path) int VSIMkdir(const char *path, long mode) int VSIRmdir(const char *path) int VSIFFlushL(VSILFILE *fp) size_t VSIFReadL(void *buffer, size_t nSize, size_t nCount, VSILFILE *fp) int VSIFSeekL(VSILFILE *fp, vsi_l_offset nOffset, int nWhence) vsi_l_offset VSIFTellL(VSILFILE *fp) int VSIFTruncateL(VSILFILE *fp, vsi_l_offset nNewSize) size_t VSIFWriteL(void *buffer, size_t nSize, size_t nCount, VSILFILE *fp) int VSIStatL(const char *pszFilename, VSIStatBufL *psStatBuf) ctypedef int OGRErr ctypedef struct OGREnvelope: double MinX double MaxX double MinY double MaxY cdef extern from "ogr_core.h": ctypedef enum OGRwkbGeometryType: wkbUnknown wkbPoint wkbLineString wkbPolygon wkbMultiPoint wkbMultiLineString wkbMultiPolygon wkbGeometryCollection wkbCircularString wkbCompoundCurve wkbCurvePolygon wkbMultiCurve wkbMultiSurface wkbCurve wkbSurface wkbPolyhedralSurface wkbTIN wkbTriangle wkbNone wkbLinearRing wkbCircularStringZ wkbCompoundCurveZ wkbCurvePolygonZ wkbMultiCurveZ wkbMultiSurfaceZ wkbCurveZ wkbSurfaceZ wkbPolyhedralSurfaceZ wkbTINZ wkbTriangleZ wkbPointM wkbLineStringM wkbPolygonM wkbMultiPointM wkbMultiLineStringM wkbMultiPolygonM wkbGeometryCollectionM wkbCircularStringM wkbCompoundCurveM wkbCurvePolygonM wkbMultiCurveM wkbMultiSurfaceM wkbCurveM wkbSurfaceM wkbPolyhedralSurfaceM wkbTINM wkbTriangleM wkbPointZM wkbLineStringZM wkbPolygonZM wkbMultiPointZM wkbMultiLineStringZM wkbMultiPolygonZM wkbGeometryCollectionZM wkbCircularStringZM wkbCompoundCurveZM wkbCurvePolygonZM wkbMultiCurveZM wkbMultiSurfaceZM wkbCurveZM wkbSurfaceZM wkbPolyhedralSurfaceZM wkbTINZM wkbTriangleZM wkbPoint25D wkbLineString25D wkbPolygon25D wkbMultiPoint25D wkbMultiLineString25D wkbMultiPolygon25D wkbGeometryCollection25D ctypedef enum OGRFieldType: OFTInteger OFTIntegerList OFTReal OFTRealList OFTString OFTStringList OFTWideString OFTWideStringList OFTBinary OFTDate OFTTime OFTDateTime OFTMaxType char * OGRGeometryTypeToName(int) char * ODsCCreateLayer = "CreateLayer" char * ODsCDeleteLayer = "DeleteLayer" cdef extern from "ogr_srs_api.h": ctypedef void * OGRSpatialReferenceH void OSRCleanup () OGRSpatialReferenceH OSRClone (OGRSpatialReferenceH srs) int OSRFixup 
(OGRSpatialReferenceH srs) int OSRExportToProj4 (OGRSpatialReferenceH srs, char **params) int OSRExportToWkt (OGRSpatialReferenceH srs, char **params) int OSRImportFromEPSG (OGRSpatialReferenceH, int code) int OSRImportFromProj4 (OGRSpatialReferenceH srs, const char *proj) int OSRSetFromUserInput (OGRSpatialReferenceH srs, const char *input) int OSRAutoIdentifyEPSG (OGRSpatialReferenceH srs) const char * OSRGetAuthorityName (OGRSpatialReferenceH srs, const char *key) const char * OSRGetAuthorityCode (OGRSpatialReferenceH srs, const char *key) OGRSpatialReferenceH OSRNewSpatialReference (char *wkt) void OSRRelease (OGRSpatialReferenceH srs) void * OCTNewCoordinateTransformation (OGRSpatialReferenceH source, OGRSpatialReferenceH dest) void OCTDestroyCoordinateTransformation (void *source) int OCTTransform (void *ct, int nCount, double *x, double *y, double *z) cdef extern from "ogr_api.h": const char * OGR_Dr_GetName (void *driver) void * OGR_Dr_CreateDataSource (void *driver, const char *path, char **options) int OGR_Dr_DeleteDataSource (void *driver, char *) void * OGR_Dr_Open (void *driver, const char *path, int bupdate) int OGR_Dr_TestCapability (void *driver, const char *) int OGR_DS_DeleteLayer (void *datasource, int n) void * OGR_DS_CreateLayer (void *datasource, char *name, void *crs, int geomType, char **options) void * OGR_DS_ExecuteSQL (void *datasource, char *name, void *filter, char *dialext) void OGR_DS_Destroy (void *datasource) void * OGR_DS_GetDriver (void *layer_defn) void * OGR_DS_GetLayerByName (void *datasource, char *name) int OGR_DS_GetLayerCount (void *datasource) void * OGR_DS_GetLayer (void *datasource, int n) void OGR_DS_ReleaseResultSet (void *datasource, void *results) int OGR_DS_SyncToDisk (void *datasource) int OGR_DS_TestCapability(void *datasource, char *capability) void * OGR_F_Create (void *featuredefn) void OGR_F_Destroy (void *feature) long OGR_F_GetFID (void *feature) int OGR_F_IsFieldSet (void *feature, int n) int OGR_F_GetFieldAsDateTime (void *feature, int n, int *y, int *m, int *d, int *h, int *m, int *s, int *z) double OGR_F_GetFieldAsDouble (void *feature, int n) int OGR_F_GetFieldAsInteger (void *feature, int n) char * OGR_F_GetFieldAsString (void *feature, int n) unsigned char * OGR_F_GetFieldAsBinary(void *feature, int n, int *s) int OGR_F_GetFieldCount (void *feature) void * OGR_F_GetFieldDefnRef (void *feature, int n) int OGR_F_GetFieldIndex (void *feature, char *name) void * OGR_F_GetGeometryRef (void *feature) void * OGR_F_StealGeometry (void *feature) void OGR_F_SetFieldDateTime (void *feature, int n, int y, int m, int d, int hh, int mm, int ss, int tz) void OGR_F_SetFieldDouble (void *feature, int n, double value) void OGR_F_SetFieldInteger (void *feature, int n, int value) void OGR_F_SetFieldString (void *feature, int n, char *value) void OGR_F_SetFieldBinary (void *feature, int n, int l, unsigned char *value) int OGR_F_SetGeometryDirectly (void *feature, void *geometry) void * OGR_FD_Create (char *name) int OGR_FD_GetFieldCount (void *featuredefn) void * OGR_FD_GetFieldDefn (void *featuredefn, int n) int OGR_FD_GetGeomType (void *featuredefn) char * OGR_FD_GetName (void *featuredefn) void * OGR_Fld_Create (char *name, OGRFieldType fieldtype) void OGR_Fld_Destroy (void *fielddefn) char * OGR_Fld_GetNameRef (void *fielddefn) int OGR_Fld_GetPrecision (void *fielddefn) int OGR_Fld_GetType (void *fielddefn) int OGR_Fld_GetWidth (void *fielddefn) void OGR_Fld_Set (void *fielddefn, char *name, int fieldtype, int width, int precision, int 
justification) void OGR_Fld_SetPrecision (void *fielddefn, int n) void OGR_Fld_SetWidth (void *fielddefn, int n) OGRErr OGR_G_AddGeometryDirectly (void *geometry, void *part) void OGR_G_AddPoint (void *geometry, double x, double y, double z) void OGR_G_AddPoint_2D (void *geometry, double x, double y) void OGR_G_CloseRings (void *geometry) void * OGR_G_CreateGeometry (int wkbtypecode) void OGR_G_DestroyGeometry (void *geometry) unsigned char * OGR_G_ExportToJson (void *geometry) void OGR_G_ExportToWkb (void *geometry, int endianness, char *buffer) int OGR_G_GetCoordinateDimension (void *geometry) int OGR_G_GetGeometryCount (void *geometry) unsigned char * OGR_G_GetGeometryName (void *geometry) int OGR_G_GetGeometryType (void *geometry) void * OGR_G_GetGeometryRef (void *geometry, int n) int OGR_G_GetPointCount (void *geometry) double OGR_G_GetX (void *geometry, int n) double OGR_G_GetY (void *geometry, int n) double OGR_G_GetZ (void *geometry, int n) OGRErr OGR_G_ImportFromWkb (void *geometry, unsigned char *bytes, int nbytes) int OGR_G_WkbSize (void *geometry) void * OGR_G_ForceToMultiPolygon (void *geometry) void * OGR_G_ForceToPolygon (void *geometry) void * OGR_G_Clone(void *geometry) OGRErr OGR_L_CreateFeature (void *layer, void *feature) OGRErr OGR_L_CreateField (void *layer, void *fielddefn, int flexible) OGRErr OGR_L_GetExtent (void *layer, void *extent, int force) void * OGR_L_GetFeature (void *layer, int n) int OGR_L_GetFeatureCount (void *layer, int m) void * OGR_L_GetLayerDefn (void *layer) char * OGR_L_GetName (void *layer) void * OGR_L_GetNextFeature (void *layer) void * OGR_L_GetSpatialFilter (void *layer) void * OGR_L_GetSpatialRef (void *layer) void OGR_L_ResetReading (void *layer) void OGR_L_SetSpatialFilter (void *layer, void *geometry) void OGR_L_SetSpatialFilterRect ( void *layer, double minx, double miny, double maxx, double maxy ) int OGR_L_TestCapability (void *layer, char *name) void * OGRGetDriverByName (char *) void * OGROpen (char *path, int mode, void *x) void * OGROpenShared (char *path, int mode, void *x) int OGRReleaseDataSource (void *datasource) OGRErr OGR_L_SetIgnoredFields (void *layer, const char **papszFields) OGRErr OGR_L_SetNextByIndex (void *layer, long nIndex) Fiona-1.8.21/fiona/ogrext2.pxd000066400000000000000000000305361420023252700160460ustar00rootroot00000000000000# Copyright (c) 2007, Sean C. Gillies # All rights reserved. 
# See ../LICENSE.txt from libc.stdio cimport FILE cdef extern from "ogr_core.h": ctypedef int OGRErr ctypedef enum OGRwkbGeometryType: wkbUnknown wkbPoint wkbLineString wkbPolygon wkbMultiPoint wkbMultiLineString wkbMultiPolygon wkbGeometryCollection wkbCircularString wkbCompoundCurve wkbCurvePolygon wkbMultiCurve wkbMultiSurface wkbCurve wkbSurface wkbPolyhedralSurface wkbTIN wkbTriangle wkbNone wkbLinearRing wkbCircularStringZ wkbCompoundCurveZ wkbCurvePolygonZ wkbMultiCurveZ wkbMultiSurfaceZ wkbCurveZ wkbSurfaceZ wkbPolyhedralSurfaceZ wkbTINZ wkbTriangleZ wkbPointM wkbLineStringM wkbPolygonM wkbMultiPointM wkbMultiLineStringM wkbMultiPolygonM wkbGeometryCollectionM wkbCircularStringM wkbCompoundCurveM wkbCurvePolygonM wkbMultiCurveM wkbMultiSurfaceM wkbCurveM wkbSurfaceM wkbPolyhedralSurfaceM wkbTINM wkbTriangleM wkbPointZM wkbLineStringZM wkbPolygonZM wkbMultiPointZM wkbMultiLineStringZM wkbMultiPolygonZM wkbGeometryCollectionZM wkbCircularStringZM wkbCompoundCurveZM wkbCurvePolygonZM wkbMultiCurveZM wkbMultiSurfaceZM wkbCurveZM wkbSurfaceZM wkbPolyhedralSurfaceZM wkbTINZM wkbTriangleZM wkbPoint25D wkbLineString25D wkbPolygon25D wkbMultiPoint25D wkbMultiLineString25D wkbMultiPolygon25D wkbGeometryCollection25D ctypedef enum OGRFieldType: OFTInteger OFTIntegerList OFTReal OFTRealList OFTString OFTStringList OFTWideString OFTWideStringList OFTBinary OFTDate OFTTime OFTDateTime OFTInteger64 OFTInteger64List OFTMaxType ctypedef int OGRFieldSubType cdef int OFSTNone = 0 cdef int OFSTBoolean = 1 cdef int OFSTInt16 = 2 cdef int OFSTFloat32 = 3 cdef int OFSTMaxSubType = 3 ctypedef struct OGREnvelope: double MinX double MaxX double MinY double MaxY char * OGRGeometryTypeToName(int) char * ODsCCreateLayer = "CreateLayer" char * ODsCDeleteLayer = "DeleteLayer" char * ODsCTransactions = "Transactions" cdef extern from "gdal.h": ctypedef void * GDALDriverH ctypedef void * GDALMajorObjectH char * GDALVersionInfo (char *pszRequest) void * GDALGetDriverByName(const char * pszName) void * GDALOpenEx(const char * pszFilename, unsigned int nOpenFlags, const char *const *papszAllowedDrivers, const char *const *papszOpenOptions, const char *const *papszSiblingFiles ) int GDAL_OF_UPDATE int GDAL_OF_READONLY int GDAL_OF_VECTOR int GDAL_OF_VERBOSE_ERROR int GDALDatasetGetLayerCount(void * hds) void * GDALDatasetGetLayer(void * hDS, int iLayer) void * GDALDatasetGetLayerByName(void * hDS, char * pszName) void GDALClose(void * hDS) void * GDALCreate(void * hDriver, const char * pszFilename, int nXSize, int nYSize, int nBands, GDALDataType eBandType, char ** papszOptions) void * GDALDatasetCreateLayer(void * hDS, const char * pszName, void * hSpatialRef, int eType, char ** papszOptions) int GDALDatasetDeleteLayer(void * hDS, int iLayer) void GDALFlushCache(void * hDS) char * GDALGetDriverShortName(void * hDriver) char * GDALGetDatasetDriver (void * hDataset) int GDALDeleteDataset(void * hDriver, const char * pszFilename) OGRErr GDALDatasetStartTransaction (void * hDataset, int bForce) OGRErr GDALDatasetCommitTransaction (void * hDataset) OGRErr GDALDatasetRollbackTransaction (void * hDataset) int GDALDatasetTestCapability (void * hDataset, char *) const char* GDALGetMetadataItem(GDALMajorObjectH obj, const char *pszName, const char *pszDomain) ctypedef enum GDALDataType: GDT_Unknown GDT_Byte GDT_UInt16 GDT_Int16 GDT_UInt32 GDT_Int32 GDT_Float32 GDT_Float64 GDT_CInt16 GDT_CInt32 GDT_CFloat32 GDT_CFloat64 GDT_TypeCount cdef extern from "gdal_version.h": int GDAL_COMPUTE_VERSION(int maj, int min, int rev) cdef 
extern from "cpl_conv.h": void * CPLMalloc (size_t) void CPLFree (void *ptr) void CPLSetThreadLocalConfigOption (char *key, char *val) const char *CPLGetConfigOption (char *, char *) int CPLCheckForFile(char *, char **) cdef extern from "cpl_string.h": char ** CSLAddNameValue (char **list, const char *name, const char *value) char ** CSLSetNameValue (char **list, const char *name, const char *value) void CSLDestroy (char **list) char ** CSLAddString(char **list, const char *string) cdef extern from "sys/stat.h" nogil: struct stat: pass cdef extern from "cpl_vsi.h" nogil: ctypedef int vsi_l_offset ctypedef FILE VSILFILE ctypedef stat VSIStatBufL unsigned char *VSIGetMemFileBuffer(const char *path, vsi_l_offset *data_len, int take_ownership) VSILFILE *VSIFileFromMemBuffer(const char *path, void *data, vsi_l_offset data_len, int take_ownership) VSILFILE* VSIFOpenL(const char *path, const char *mode) int VSIFCloseL(VSILFILE *fp) int VSIUnlink(const char *path) int VSIMkdir(const char *path, long mode) int VSIRmdir(const char *path) int VSIFFlushL(VSILFILE *fp) size_t VSIFReadL(void *buffer, size_t nSize, size_t nCount, VSILFILE *fp) int VSIFSeekL(VSILFILE *fp, vsi_l_offset nOffset, int nWhence) vsi_l_offset VSIFTellL(VSILFILE *fp) int VSIFTruncateL(VSILFILE *fp, vsi_l_offset nNewSize) size_t VSIFWriteL(void *buffer, size_t nSize, size_t nCount, VSILFILE *fp) int VSIStatL(const char *pszFilename, VSIStatBufL *psStatBuf) cdef extern from "ogr_srs_api.h": ctypedef void * OGRSpatialReferenceH void OSRCleanup () OGRSpatialReferenceH OSRClone (OGRSpatialReferenceH srs) int OSRFixup (OGRSpatialReferenceH srs) int OSRExportToProj4 (OGRSpatialReferenceH srs, char **params) int OSRExportToWkt (OGRSpatialReferenceH srs, char **params) int OSRImportFromEPSG (OGRSpatialReferenceH, int code) int OSRImportFromProj4 (OGRSpatialReferenceH srs, const char *proj) int OSRSetFromUserInput (OGRSpatialReferenceH srs, const char *input) int OSRAutoIdentifyEPSG (OGRSpatialReferenceH srs) const char * OSRGetAuthorityName (OGRSpatialReferenceH srs, const char *key) const char * OSRGetAuthorityCode (OGRSpatialReferenceH srs, const char *key) OGRSpatialReferenceH OSRNewSpatialReference (char *wkt) void OSRRelease (OGRSpatialReferenceH srs) void * OCTNewCoordinateTransformation (OGRSpatialReferenceH source, OGRSpatialReferenceH dest) void OCTDestroyCoordinateTransformation (void *source) int OCTTransform (void *ct, int nCount, double *x, double *y, double *z) cdef extern from "ogr_api.h": const char * OGR_Dr_GetName (void *driver) int OGR_Dr_TestCapability (void *driver, const char *) void * OGR_F_Create (void *featuredefn) void OGR_F_Destroy (void *feature) long OGR_F_GetFID (void *feature) int OGR_F_IsFieldSet (void *feature, int n) int OGR_F_GetFieldAsDateTimeEx (void *feature, int n, int *y, int *m, int *d, int *h, int *m, float *s, int *z) double OGR_F_GetFieldAsDouble (void *feature, int n) int OGR_F_GetFieldAsInteger (void *feature, int n) char * OGR_F_GetFieldAsString (void *feature, int n) unsigned char * OGR_F_GetFieldAsBinary(void *feature, int n, int *s) int OGR_F_GetFieldCount (void *feature) void * OGR_F_GetFieldDefnRef (void *feature, int n) int OGR_F_GetFieldIndex (void *feature, char *name) void * OGR_F_GetGeometryRef (void *feature) void * OGR_F_StealGeometry (void *feature) void OGR_F_SetFieldDateTimeEx (void *feature, int n, int y, int m, int d, int hh, int mm, float ss, int tz) void OGR_F_SetFieldDouble (void *feature, int n, double value) void OGR_F_SetFieldInteger (void *feature, int n, int value) 
void OGR_F_SetFieldString (void *feature, int n, char *value) void OGR_F_SetFieldBinary (void *feature, int n, int l, unsigned char *value) void OGR_F_SetFieldNull (void *feature, int n) # new in GDAL 2.2 int OGR_F_SetGeometryDirectly (void *feature, void *geometry) void * OGR_FD_Create (char *name) int OGR_FD_GetFieldCount (void *featuredefn) void * OGR_FD_GetFieldDefn (void *featuredefn, int n) int OGR_FD_GetGeomType (void *featuredefn) char * OGR_FD_GetName (void *featuredefn) void * OGR_Fld_Create (char *name, OGRFieldType fieldtype) void OGR_Fld_Destroy (void *fielddefn) char * OGR_Fld_GetNameRef (void *fielddefn) int OGR_Fld_GetPrecision (void *fielddefn) int OGR_Fld_GetType (void *fielddefn) int OGR_Fld_GetWidth (void *fielddefn) void OGR_Fld_Set (void *fielddefn, char *name, int fieldtype, int width, int precision, int justification) void OGR_Fld_SetPrecision (void *fielddefn, int n) void OGR_Fld_SetWidth (void *fielddefn, int n) OGRFieldSubType OGR_Fld_GetSubType(void *fielddefn) void OGR_Fld_SetSubType(void *fielddefn, OGRFieldSubType subtype) OGRErr OGR_G_AddGeometryDirectly (void *geometry, void *part) void OGR_G_AddPoint (void *geometry, double x, double y, double z) void OGR_G_AddPoint_2D (void *geometry, double x, double y) void OGR_G_CloseRings (void *geometry) void * OGR_G_CreateGeometry (int wkbtypecode) void OGR_G_DestroyGeometry (void *geometry) unsigned char * OGR_G_ExportToJson (void *geometry) void OGR_G_ExportToWkb (void *geometry, int endianness, char *buffer) int OGR_G_GetGeometryCount (void *geometry) unsigned char * OGR_G_GetGeometryName (void *geometry) int OGR_G_GetGeometryType (void *geometry) void * OGR_G_GetGeometryRef (void *geometry, int n) int OGR_G_GetPointCount (void *geometry) double OGR_G_GetX (void *geometry, int n) double OGR_G_GetY (void *geometry, int n) double OGR_G_GetZ (void *geometry, int n) OGRErr OGR_G_ImportFromWkb (void *geometry, unsigned char *bytes, int nbytes) int OGR_G_WkbSize (void *geometry) void * OGR_G_ForceToMultiPolygon (void *geometry) void * OGR_G_ForceToPolygon (void *geometry) void * OGR_G_Clone(void *geometry) OGRErr OGR_L_CreateFeature (void *layer, void *feature) OGRErr OGR_L_CreateField (void *layer, void *fielddefn, int flexible) OGRErr OGR_L_GetExtent (void *layer, void *extent, int force) void * OGR_L_GetFeature (void *layer, int n) int OGR_L_GetFeatureCount (void *layer, int m) void * OGR_G_GetLinearGeometry (void *hGeom, double dfMaxAngleStepSizeDegrees, char **papszOptions) void * OGR_L_GetLayerDefn (void *layer) char * OGR_L_GetName (void *layer) void * OGR_L_GetNextFeature (void *layer) void * OGR_L_GetSpatialFilter (void *layer) void * OGR_L_GetSpatialRef (void *layer) void OGR_L_ResetReading (void *layer) void OGR_L_SetSpatialFilter (void *layer, void *geometry) void OGR_L_SetSpatialFilterRect ( void *layer, double minx, double miny, double maxx, double maxy ) int OGR_L_TestCapability (void *layer, char *name) OGRErr OGR_L_SetIgnoredFields (void *layer, const char **papszFields) OGRErr OGR_L_SetNextByIndex (void *layer, long nIndex) long long OGR_F_GetFieldAsInteger64 (void *feature, int n) void OGR_F_SetFieldInteger64 (void *feature, int n, long long value) Fiona-1.8.21/fiona/ogrext3.pxd000066400000000000000000000305671420023252700160530ustar00rootroot00000000000000# Copyright (c) 2007, Sean C. Gillies # All rights reserved. 
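# --- Illustrative aside (hedged sketch, not from the original sources) ------
# ogrext2.pxd above declares the GDAL 2.x-only calls: OGR_F_SetFieldNull,
# 64-bit integer fields (OGR_F_GetFieldAsInteger64) and field subtypes
# (OGR_Fld_SetSubType, OFSTBoolean). In the public API these surface as the
# 'int64' and 'bool' schema types. A minimal sketch, assuming a GDAL 2+ build
# and a driver that supports these types; "demo.gpkg" is a hypothetical
# output file.
import fiona
from fiona.crs import from_epsg

schema = {"geometry": "Point", "properties": {"big": "int64", "flag": "bool"}}
with fiona.open("demo.gpkg", "w", driver="GPKG", crs=from_epsg(4326),
                schema=schema) as dst:
    dst.write({
        "geometry": {"type": "Point", "coordinates": (0.0, 0.0)},
        "properties": {"big": 2 ** 40, "flag": True},
    })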
# See ../LICENSE.txt from libc.stdio cimport FILE cdef extern from "ogr_core.h": ctypedef int OGRErr ctypedef enum OGRwkbGeometryType: wkbUnknown wkbPoint wkbLineString wkbPolygon wkbMultiPoint wkbMultiLineString wkbMultiPolygon wkbGeometryCollection wkbCircularString wkbCompoundCurve wkbCurvePolygon wkbMultiCurve wkbMultiSurface wkbCurve wkbSurface wkbPolyhedralSurface wkbTIN wkbTriangle wkbNone wkbLinearRing wkbCircularStringZ wkbCompoundCurveZ wkbCurvePolygonZ wkbMultiCurveZ wkbMultiSurfaceZ wkbCurveZ wkbSurfaceZ wkbPolyhedralSurfaceZ wkbTINZ wkbTriangleZ wkbPointM wkbLineStringM wkbPolygonM wkbMultiPointM wkbMultiLineStringM wkbMultiPolygonM wkbGeometryCollectionM wkbCircularStringM wkbCompoundCurveM wkbCurvePolygonM wkbMultiCurveM wkbMultiSurfaceM wkbCurveM wkbSurfaceM wkbPolyhedralSurfaceM wkbTINM wkbTriangleM wkbPointZM wkbLineStringZM wkbPolygonZM wkbMultiPointZM wkbMultiLineStringZM wkbMultiPolygonZM wkbGeometryCollectionZM wkbCircularStringZM wkbCompoundCurveZM wkbCurvePolygonZM wkbMultiCurveZM wkbMultiSurfaceZM wkbCurveZM wkbSurfaceZM wkbPolyhedralSurfaceZM wkbTINZM wkbTriangleZM wkbPoint25D wkbLineString25D wkbPolygon25D wkbMultiPoint25D wkbMultiLineString25D wkbMultiPolygon25D wkbGeometryCollection25D ctypedef enum OGRFieldType: OFTInteger OFTIntegerList OFTReal OFTRealList OFTString OFTStringList OFTWideString OFTWideStringList OFTBinary OFTDate OFTTime OFTDateTime OFTInteger64 OFTInteger64List OFTMaxType ctypedef int OGRFieldSubType cdef int OFSTNone = 0 cdef int OFSTBoolean = 1 cdef int OFSTInt16 = 2 cdef int OFSTFloat32 = 3 cdef int OFSTMaxSubType = 3 ctypedef struct OGREnvelope: double MinX double MaxX double MinY double MaxY char * OGRGeometryTypeToName(int) char * ODsCCreateLayer = "CreateLayer" char * ODsCDeleteLayer = "DeleteLayer" char * ODsCTransactions = "Transactions" cdef extern from "gdal.h": ctypedef void * GDALDriverH ctypedef void * GDALMajorObjectH char * GDALVersionInfo (char *pszRequest) void * GDALGetDriverByName(const char * pszName) void * GDALOpenEx(const char * pszFilename, unsigned int nOpenFlags, const char *const *papszAllowedDrivers, const char *const *papszOpenOptions, const char *const *papszSiblingFiles ) int GDAL_OF_UPDATE int GDAL_OF_READONLY int GDAL_OF_VECTOR int GDAL_OF_VERBOSE_ERROR int GDALDatasetGetLayerCount(void * hds) void * GDALDatasetGetLayer(void * hDS, int iLayer) void * GDALDatasetGetLayerByName(void * hDS, char * pszName) void GDALClose(void * hDS) void * GDALCreate(void * hDriver, const char * pszFilename, int nXSize, int nYSize, int nBands, GDALDataType eBandType, char ** papszOptions) void * GDALDatasetCreateLayer(void * hDS, const char * pszName, void * hSpatialRef, int eType, char ** papszOptions) int GDALDatasetDeleteLayer(void * hDS, int iLayer) void GDALFlushCache(void * hDS) char * GDALGetDriverShortName(void * hDriver) char * GDALGetDatasetDriver (void * hDataset) int GDALDeleteDataset(void * hDriver, const char * pszFilename) OGRErr GDALDatasetStartTransaction (void * hDataset, int bForce) OGRErr GDALDatasetCommitTransaction (void * hDataset) OGRErr GDALDatasetRollbackTransaction (void * hDataset) int GDALDatasetTestCapability (void * hDataset, char *) const char* GDALGetMetadataItem(GDALMajorObjectH obj, const char *pszName, const char *pszDomain) ctypedef enum GDALDataType: GDT_Unknown GDT_Byte GDT_UInt16 GDT_Int16 GDT_UInt32 GDT_Int32 GDT_Float32 GDT_Float64 GDT_CInt16 GDT_CInt32 GDT_CFloat32 GDT_CFloat64 GDT_TypeCount cdef extern from "gdal_version.h": int GDAL_COMPUTE_VERSION(int maj, int min, int rev) cdef 
extern from "cpl_conv.h": void * CPLMalloc (size_t) void CPLFree (void *ptr) void CPLSetThreadLocalConfigOption (char *key, char *val) const char *CPLGetConfigOption (char *, char *) int CPLCheckForFile(char *, char **) cdef extern from "cpl_string.h": char ** CSLAddNameValue (char **list, const char *name, const char *value) char ** CSLSetNameValue (char **list, const char *name, const char *value) void CSLDestroy (char **list) char ** CSLAddString(char **list, const char *string) cdef extern from "sys/stat.h" nogil: struct stat: pass cdef extern from "cpl_vsi.h" nogil: ctypedef int vsi_l_offset ctypedef FILE VSILFILE ctypedef stat VSIStatBufL unsigned char *VSIGetMemFileBuffer(const char *path, vsi_l_offset *data_len, int take_ownership) VSILFILE *VSIFileFromMemBuffer(const char *path, void *data, vsi_l_offset data_len, int take_ownership) VSILFILE* VSIFOpenL(const char *path, const char *mode) int VSIFCloseL(VSILFILE *fp) int VSIUnlink(const char *path) int VSIMkdir(const char *path, long mode) int VSIRmdir(const char *path) int VSIFFlushL(VSILFILE *fp) size_t VSIFReadL(void *buffer, size_t nSize, size_t nCount, VSILFILE *fp) int VSIFSeekL(VSILFILE *fp, vsi_l_offset nOffset, int nWhence) vsi_l_offset VSIFTellL(VSILFILE *fp) int VSIFTruncateL(VSILFILE *fp, vsi_l_offset nNewSize) size_t VSIFWriteL(void *buffer, size_t nSize, size_t nCount, VSILFILE *fp) int VSIStatL(const char *pszFilename, VSIStatBufL *psStatBuf) cdef extern from "ogr_srs_api.h": ctypedef void * OGRSpatialReferenceH void OSRCleanup () OGRSpatialReferenceH OSRClone (OGRSpatialReferenceH srs) int OSRExportToProj4 (OGRSpatialReferenceH srs, char **params) int OSRExportToWkt (OGRSpatialReferenceH srs, char **params) int OSRImportFromEPSG (OGRSpatialReferenceH, int code) int OSRImportFromProj4 (OGRSpatialReferenceH srs, const char *proj) int OSRSetFromUserInput (OGRSpatialReferenceH srs, const char *input) int OSRAutoIdentifyEPSG (OGRSpatialReferenceH srs) const char * OSRGetAuthorityName (OGRSpatialReferenceH srs, const char *key) const char * OSRGetAuthorityCode (OGRSpatialReferenceH srs, const char *key) OGRSpatialReferenceH OSRNewSpatialReference (char *wkt) void OSRRelease (OGRSpatialReferenceH srs) void * OCTNewCoordinateTransformation (OGRSpatialReferenceH source, OGRSpatialReferenceH dest) void OCTDestroyCoordinateTransformation (void *source) int OCTTransform (void *ct, int nCount, double *x, double *y, double *z) void OSRGetPROJVersion (int *pnMajor, int *pnMinor, int *pnPatch) cdef extern from "ogr_api.h": const char * OGR_Dr_GetName (void *driver) int OGR_Dr_TestCapability (void *driver, const char *) void * OGR_F_Create (void *featuredefn) void OGR_F_Destroy (void *feature) long OGR_F_GetFID (void *feature) int OGR_F_IsFieldSet (void *feature, int n) int OGR_F_GetFieldAsDateTimeEx (void *feature, int n, int *y, int *m, int *d, int *h, int *m, float *s, int *z) double OGR_F_GetFieldAsDouble (void *feature, int n) int OGR_F_GetFieldAsInteger (void *feature, int n) char * OGR_F_GetFieldAsString (void *feature, int n) unsigned char * OGR_F_GetFieldAsBinary(void *feature, int n, int *s) int OGR_F_GetFieldCount (void *feature) void * OGR_F_GetFieldDefnRef (void *feature, int n) int OGR_F_GetFieldIndex (void *feature, char *name) void * OGR_F_GetGeometryRef (void *feature) void * OGR_F_StealGeometry (void *feature) void OGR_F_SetFieldDateTimeEx (void *feature, int n, int y, int m, int d, int hh, int mm, float ss, int tz) void OGR_F_SetFieldDouble (void *feature, int n, double value) void OGR_F_SetFieldInteger (void 
*feature, int n, int value) void OGR_F_SetFieldString (void *feature, int n, char *value) void OGR_F_SetFieldBinary (void *feature, int n, int l, unsigned char *value) void OGR_F_SetFieldNull (void *feature, int n) # new in GDAL 2.2 int OGR_F_SetGeometryDirectly (void *feature, void *geometry) void * OGR_FD_Create (char *name) int OGR_FD_GetFieldCount (void *featuredefn) void * OGR_FD_GetFieldDefn (void *featuredefn, int n) int OGR_FD_GetGeomType (void *featuredefn) char * OGR_FD_GetName (void *featuredefn) void * OGR_Fld_Create (char *name, OGRFieldType fieldtype) void OGR_Fld_Destroy (void *fielddefn) char * OGR_Fld_GetNameRef (void *fielddefn) int OGR_Fld_GetPrecision (void *fielddefn) int OGR_Fld_GetType (void *fielddefn) int OGR_Fld_GetWidth (void *fielddefn) void OGR_Fld_Set (void *fielddefn, char *name, int fieldtype, int width, int precision, int justification) void OGR_Fld_SetPrecision (void *fielddefn, int n) void OGR_Fld_SetWidth (void *fielddefn, int n) OGRFieldSubType OGR_Fld_GetSubType(void *fielddefn) void OGR_Fld_SetSubType(void *fielddefn, OGRFieldSubType subtype) OGRErr OGR_G_AddGeometryDirectly (void *geometry, void *part) void OGR_G_AddPoint (void *geometry, double x, double y, double z) void OGR_G_AddPoint_2D (void *geometry, double x, double y) void OGR_G_CloseRings (void *geometry) void * OGR_G_CreateGeometry (int wkbtypecode) void OGR_G_DestroyGeometry (void *geometry) unsigned char * OGR_G_ExportToJson (void *geometry) OGRErr OGR_G_ExportToWkb (void *geometry, int endianness, char *buffer) int OGR_G_GetGeometryCount (void *geometry) unsigned char * OGR_G_GetGeometryName (void *geometry) int OGR_G_GetGeometryType (void *geometry) void * OGR_G_GetGeometryRef (void *geometry, int n) int OGR_G_GetPointCount (void *geometry) double OGR_G_GetX (void *geometry, int n) double OGR_G_GetY (void *geometry, int n) double OGR_G_GetZ (void *geometry, int n) void OGR_G_ImportFromWkb (void *geometry, unsigned char *bytes, int nbytes) int OGR_G_WkbSize (void *geometry) void * OGR_G_ForceToMultiPolygon (void *geometry) void * OGR_G_ForceToPolygon (void *geometry) void * OGR_G_Clone(void *geometry) OGRErr OGR_L_CreateFeature (void *layer, void *feature) OGRErr OGR_L_CreateField (void *layer, void *fielddefn, int flexible) OGRErr OGR_L_GetExtent (void *layer, void *extent, int force) void * OGR_L_GetFeature (void *layer, int n) int OGR_L_GetFeatureCount (void *layer, int m) void * OGR_G_GetLinearGeometry (void *hGeom, double dfMaxAngleStepSizeDegrees, char **papszOptions) void * OGR_L_GetLayerDefn (void *layer) char * OGR_L_GetName (void *layer) void * OGR_L_GetNextFeature (void *layer) void * OGR_L_GetSpatialFilter (void *layer) void * OGR_L_GetSpatialRef (void *layer) void OGR_L_ResetReading (void *layer) void OGR_L_SetSpatialFilter (void *layer, void *geometry) void OGR_L_SetSpatialFilterRect ( void *layer, double minx, double miny, double maxx, double maxy ) int OGR_L_TestCapability (void *layer, char *name) OGRErr OGR_L_SetIgnoredFields (void *layer, const char **papszFields) OGRErr OGR_L_SetNextByIndex (void *layer, long nIndex) long long OGR_F_GetFieldAsInteger64 (void *feature, int n) void OGR_F_SetFieldInteger64 (void *feature, int n, long long value) Fiona-1.8.21/fiona/path.py000066400000000000000000000111121420023252700152320ustar00rootroot00000000000000"""Dataset paths, identifiers, and filenames""" import re import sys import attr from fiona.compat import urlparse # Supported URI schemes and their mapping to GDAL's VSI suffix. # TODO: extend for other cloud plaforms. 
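# --- Illustrative aside (hedged sketch, not from the original sources) ------
# A usage sketch (hypothetical URL) for the helpers defined below in this
# module: parse_path() splits a chained URI into scheme, archive, and path,
# and vsi_path() rebuilds it as a GDAL VSI path using the scheme table that
# follows.
from fiona.path import parse_path, vsi_path

p = parse_path("zip+https://example.com/files.zip!data.shp")
assert (p.scheme, p.archive, p.path) == (
    "zip+https", "example.com/files.zip", "data.shp")
assert vsi_path(p) == "/vsizip/vsicurl/https://example.com/files.zip/data.shp"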
SCHEMES = { 'ftp': 'curl', 'gzip': 'gzip', 'http': 'curl', 'https': 'curl', 's3': 's3', 'tar': 'tar', 'zip': 'zip', 'file': 'file', 'gs': 'gs', } CURLSCHEMES = set([k for k, v in SCHEMES.items() if v == 'curl']) # TODO: extend for other cloud plaforms. REMOTESCHEMES = set([k for k, v in SCHEMES.items() if v in ('curl', 's3', 'gs')]) class Path(object): """Base class for dataset paths""" @attr.s(slots=True) class ParsedPath(Path): """Result of parsing a dataset URI/Path Attributes ---------- path : str Parsed path. Includes the hostname and query string in the case of a URI. archive : str Parsed archive path. scheme : str URI scheme such as "https" or "zip+s3". """ path = attr.ib() archive = attr.ib() scheme = attr.ib() @classmethod def from_uri(cls, uri): parts = urlparse(uri) path = parts.path scheme = parts.scheme or None if parts.query: path += "?" + parts.query if parts.scheme and parts.netloc: path = parts.netloc + path parts = path.split('!') path = parts.pop() if parts else None archive = parts.pop() if parts else None return ParsedPath(path, archive, scheme) @property def name(self): """The parsed path's original URI""" if not self.scheme: return self.path elif self.archive: return "{}://{}!{}".format(self.scheme, self.archive, self.path) else: return "{}://{}".format(self.scheme, self.path) @property def is_remote(self): """Test if the path is a remote, network URI""" return self.scheme and self.scheme.split('+')[-1] in REMOTESCHEMES @property def is_local(self): """Test if the path is a local URI""" return not self.scheme or (self.scheme and self.scheme.split('+')[-1] not in REMOTESCHEMES) @attr.s(slots=True) class UnparsedPath(Path): """Encapsulates legacy GDAL filenames Attributes ---------- path : str The legacy GDAL filename. """ path = attr.ib() @property def name(self): """The unparsed path's original path""" return self.path def parse_path(path): """Parse a dataset's identifier or path into its parts Parameters ---------- path : str or path-like object The path to be parsed. Returns ------- ParsedPath or UnparsedPath Notes ----- When legacy GDAL filenames are encountered, they will be returned in a UnparsedPath. """ if isinstance(path, Path): return path # Windows drive letters (e.g. "C:\") confuse `urlparse` as they look like # URL schemes elif sys.platform == "win32" and re.match("^[a-zA-Z]\\:", path): return UnparsedPath(path) elif path.startswith('/vsi'): return UnparsedPath(path) elif re.match("^[a-z0-9\\+]*://", path): parts = urlparse(path) # if the scheme is not one of Rasterio's supported schemes, we # return an UnparsedPath. if parts.scheme and not all(p in SCHEMES for p in parts.scheme.split('+')): return UnparsedPath(path) else: return ParsedPath.from_uri(path) else: return UnparsedPath(path) def vsi_path(path): """Convert a parsed path to a GDAL VSI path Parameters ---------- path : Path A ParsedPath or UnparsedPath object. 
Returns ------- str """ if isinstance(path, UnparsedPath): return path.path elif isinstance(path, ParsedPath): if not path.scheme: return path.path else: if path.scheme.split('+')[-1] in CURLSCHEMES: suffix = '{}://'.format(path.scheme.split('+')[-1]) else: suffix = '' prefix = '/'.join('vsi{0}'.format(SCHEMES[p]) for p in path.scheme.split('+') if p != 'file') if prefix: if path.archive: result = '/{}/{}{}/{}'.format(prefix, suffix, path.archive, path.path.lstrip('/')) else: result = '/{}/{}{}'.format(prefix, suffix, path.path) else: result = path.path return result else: raise ValueError("path must be a ParsedPath or UnparsedPath object") Fiona-1.8.21/fiona/rfc3339.py000066400000000000000000000070611420023252700154020ustar00rootroot00000000000000# Fiona's date and time handling is founded on RFC 3339. # # OGR knows 3 time "zones": GMT, "local time", and "unknown". Fiona, when # writing, will convert times with a timezone offset to GMT (Z) and otherwise # will write times with the unknown zone. import logging import re log = logging.getLogger("Fiona") # Fiona's 'date', 'time', and 'datetime' types are subtypes of 'str'. class FionaDateType(str): """Dates without time.""" class FionaTimeType(str): """Times without dates.""" class FionaDateTimeType(str): """Dates and times.""" pattern_date = re.compile(r"(\d\d\d\d)(-)?(\d\d)(-)?(\d\d)") pattern_time = re.compile( r"(\d\d)(:)?(\d\d)(:)?(\d\d)?(\.\d+)?(Z|([+-])?(\d\d)?(:)?(\d\d))?") pattern_datetime = re.compile( r"(\d\d\d\d)(-)?(\d\d)(-)?(\d\d)(T)?(\d\d)(:)?(\d\d)(:)?(\d\d)?(\.\d+)?(Z|([+-])?(\d\d)?(:)?(\d\d))?") class group_accessor(object): def __init__(self, m): self.match = m def group(self, i): try: return self.match.group(i) or 0 except IndexError: return 0 def parse_time(text): """ Given a time, returns a datetime tuple Parameters ---------- text: string to be parsed Returns ------- (int, int, int, int, int, int, int, int): datetime tuple: (year, month, day, hour, minute, second, microsecond, utcoffset in minutes or None) """ match = re.search(pattern_time, text) if match is None: raise ValueError("Time data '%s' does not match pattern" % text) g = group_accessor(match) log.debug("Match groups: %s", match.groups()) if g.group(8) == '-': tz = -1.0 * (int(g.group(9)) * 60 + int(g.group(11))) elif g.group(8) == '+': tz = int(g.group(9)) * 60 + int(g.group(11)) else: tz = None return (0, 0, 0, int(g.group(1)), int(g.group(3)), int(g.group(5)), int(1000000.0 * float(g.group(6))), tz ) def parse_date(text): """Given a date, returns a datetime tuple Parameters ---------- text: string to be parsed Returns ------- (int, int, int, int, int, int, int, int): datetime tuple: (year, month, day, hour, minute, second, microsecond, utcoffset in minutes or None) """ match = re.search(pattern_date, text) if match is None: raise ValueError("Time data '%s' does not match pattern" % text) g = group_accessor(match) log.debug("Match groups: %s", match.groups()) return ( int(g.group(1)), int(g.group(3)), int(g.group(5)), 0, 0, 0, 0, None) def parse_datetime(text): """Given a datetime, returns a datetime tuple Parameters ---------- text: string to be parsed Returns ------- (int, int, int, int, int, int, int, int): datetime tuple: (year, month, day, hour, minute, second, microsecond, utcoffset in minutes or None) """ match = re.search(pattern_datetime, text) if match is None: raise ValueError("Time data '%s' does not match pattern" % text) g = group_accessor(match) log.debug("Match groups: %s", match.groups()) if g.group(14) == '-': tz = -1.0 * (int(g.group(15)) * 60
+ int(g.group(17))) elif g.group(14) == '+': tz = int(g.group(15)) * 60 + int(g.group(17)) else: tz = None return ( int(g.group(1)), int(g.group(3)), int(g.group(5)), int(g.group(7)), int(g.group(9)), int(g.group(11)), int(1000000.0 * float(g.group(12))), tz) Fiona-1.8.21/fiona/schema.pyx000066400000000000000000000046731420023252700157440ustar00rootroot00000000000000from six import text_type from fiona.errors import SchemaError from fiona.rfc3339 import FionaDateType, FionaDateTimeType, FionaTimeType cdef extern from "gdal.h": char * GDALVersionInfo (char *pszRequest) def _get_gdal_version_num(): """Return current internal version number of gdal""" return int(GDALVersionInfo("VERSION_NUM")) GDAL_VERSION_NUM = _get_gdal_version_num() # Mapping of OGR integer field types to Fiona field type names. # Lists are currently unsupported in this version, but might be done as # arrays in a future version. FIELD_TYPES = [ 'int32', # OFTInteger, Simple 32bit integer None, # OFTIntegerList, List of 32bit integers 'float', # OFTReal, Double Precision floating point None, # OFTRealList, List of doubles 'str', # OFTString, String of UTF-8 chars None, # OFTStringList, Array of strings None, # OFTWideString, deprecated None, # OFTWideStringList, deprecated 'bytes', # OFTBinary, Raw Binary data 'date', # OFTDate, Date 'time', # OFTTime, Time 'datetime', # OFTDateTime, Date and Time 'int64', # OFTInteger64, Single 64bit integer None # OFTInteger64List, List of 64bit integers ] # Mapping of Fiona field type names to Python types. FIELD_TYPES_MAP = { 'int32': int, 'float': float, 'str': text_type, 'date': FionaDateType, 'time': FionaTimeType, 'datetime': FionaDateTimeType, 'bytes': bytes, 'int64': int, 'int': int } FIELD_TYPES_MAP_REV = dict([(v, k) for k, v in FIELD_TYPES_MAP.items()]) FIELD_TYPES_MAP_REV[int] = 'int' def normalize_field_type(ftype): """Normalize free form field types to an element of FIELD_TYPES Parameters ---------- ftype : str A type:width format like 'int:9' or 'str:255' Returns ------- str An element from FIELD_TYPES """ if ftype in FIELD_TYPES: return ftype elif ftype == 'bool': return 'bool' elif ftype.startswith('int'): width = int((ftype.split(':')[1:] or ['0'])[0]) if GDAL_VERSION_NUM >= 2000000 and (width == 0 or width >= 10): return 'int64' else: return 'int32' elif ftype.startswith('str'): return 'str' elif ftype.startswith('float'): return 'float' else: raise SchemaError("Unknown field type: {}".format(ftype)) Fiona-1.8.21/fiona/session.py000066400000000000000000000145271420023252700157760ustar00rootroot00000000000000"""Abstraction for sessions in various clouds.""" from fiona.path import parse_path, UnparsedPath class Session(object): """Base for classes that configure access to secured resources. Attributes ---------- credentials : dict Keys and values for session credentials. Notes ----- This class is not intended to be instantiated. """ def get_credential_options(self): """Get credentials as GDAL configuration options Returns ------- dict """ return NotImplementedError @staticmethod def from_foreign_session(session, cls=None): """Create a session object matching the foreign `session`. Parameters ---------- session : obj A foreign session object. cls : Session class, optional The class to return. Returns ------- Session """ if not cls: return DummySession() else: return cls(session) @staticmethod def from_path(path, *args, **kwargs): """Create a session object suited to the data at `path`. Parameters ---------- path : str A dataset path or identifier. 
args : sequence Positional arguments for the foreign session constructor. kwargs : dict Keyword arguments for the foreign session constructor. Returns ------- Session """ if not path: return DummySession() path = parse_path(path) if isinstance(path, UnparsedPath) or path.is_local: return DummySession() elif path.scheme == "s3" or path.scheme.endswith("+s3") or "amazonaws.com" in path.path: return AWSSession(*args, **kwargs) # This factory can be extended to other cloud providers here. # elif path.scheme == "cumulonimbus": # for example. # return CumulonimbusSession(*args, **kwargs) else: return DummySession() class DummySession(Session): """A dummy session. Attributes ---------- credentials : dict The session credentials. """ def __init__(self, *args, **kwargs): self._session = None self.credentials = {} def get_credential_options(self): """Get credentials as GDAL configuration options Returns ------- dict """ return {} class AWSSession(Session): """Configures access to secured resources stored in AWS S3. """ def __init__( self, session=None, aws_unsigned=False, aws_access_key_id=None, aws_secret_access_key=None, aws_session_token=None, region_name=None, profile_name=None, requester_pays=False): """Create a new boto3 session Parameters ---------- session : optional A boto3 session object. aws_unsigned : bool, optional (default: False) If True, requests will be unsigned. aws_access_key_id : str, optional An access key id, as per boto3. aws_secret_access_key : str, optional A secret access key, as per boto3. aws_session_token : str, optional A session token, as per boto3. region_name : str, optional A region name, as per boto3. profile_name : str, optional A shared credentials profile name, as per boto3. requester_pays : bool, optional True if the requester agrees to pay transfer costs (default: False) """ import boto3 if session: self._session = session else: self._session = boto3.Session( aws_access_key_id=aws_access_key_id, aws_secret_access_key=aws_secret_access_key, aws_session_token=aws_session_token, region_name=region_name, profile_name=profile_name) self.requester_pays = requester_pays self.unsigned = aws_unsigned self._creds = self._session._session.get_credentials() @property def credentials(self): """The session credentials as a dict""" creds = {} if self._creds: if self._creds.access_key: # pragma: no branch creds['aws_access_key_id'] = self._creds.access_key if self._creds.secret_key: # pragma: no branch creds['aws_secret_access_key'] = self._creds.secret_key if self._creds.token: creds['aws_session_token'] = self._creds.token if self._session.region_name: creds['aws_region'] = self._session.region_name if self.requester_pays: creds['aws_request_payer'] = 'requester' return creds def get_credential_options(self): """Get credentials as GDAL configuration options Returns ------- dict """ if self.unsigned: return {'AWS_NO_SIGN_REQUEST': 'YES'} else: return {k.upper(): v for k, v in self.credentials.items()} class GSSession(Session): """Configures access to secured resources stored in Google Cloud Storage """ def __init__(self, google_application_credentials=None): """Create a new Google Cloud Storage session Parameters ---------- google_application_credentials: string Path to the Google application credentials JSON file.
""" self._creds = {} if google_application_credentials is not None: self._creds['google_application_credentials'] = google_application_credentials @classmethod def hascreds(cls, config): """Determine if the given configuration has proper credentials Parameters ---------- cls : class A Session class. config : dict GDAL configuration as a dict. Returns ------- bool """ return 'GOOGLE_APPLICATION_CREDENTIALS' in config @property def credentials(self): """The session credentials as a dict""" return self._creds def get_credential_options(self): """Get credentials as GDAL configuration options Returns ------- dict """ return {k.upper(): v for k, v in self.credentials.items()} Fiona-1.8.21/fiona/transform.py000066400000000000000000000065701420023252700163250ustar00rootroot00000000000000"""Coordinate and geometry warping and reprojection""" import fiona._loading with fiona._loading.add_gdal_dll_directories(): from fiona._transform import _transform, _transform_geom def transform(src_crs, dst_crs, xs, ys): """Transform coordinates from one reference system to another. Parameters ---------- src_crs: str or dict A string like 'EPSG:4326' or a dict of proj4 parameters like {'proj': 'lcc', 'lat_0': 18.0, 'lat_1': 18.0, 'lon_0': -77.0} representing the coordinate reference system on the "source" or "from" side of the transformation. dst_crs: str or dict A string or dict representing the coordinate reference system on the "destination" or "to" side of the transformation. xs: sequence of float A list or tuple of x coordinate values. Must have the same length as the ``ys`` parameter. ys: sequence of float A list or tuple of y coordinate values. Must have the same length as the ``xs`` parameter. Returns ------- xp, yp: list of float A pair of transformed coordinate sequences. The elements of ``xp`` and ``yp`` correspond exactly to the elements of the ``xs`` and ``ys`` input parameters. Examples -------- >>> transform('EPSG:4326', 'EPSG:26953', [-105.0], [40.0]) ([957097.0952383667], [378940.8419189212]) """ # Function is implemented in the _transform C extension module. return _transform(src_crs, dst_crs, xs, ys) def transform_geom( src_crs, dst_crs, geom, antimeridian_cutting=False, antimeridian_offset=10.0, precision=-1): """Transform a geometry obj from one reference system to another. Parameters ---------- src_crs: str or dict A string like 'EPSG:4326' or a dict of proj4 parameters like {'proj': 'lcc', 'lat_0': 18.0, 'lat_1': 18.0, 'lon_0': -77.0} representing the coordinate reference system on the "source" or "from" side of the transformation. dst_crs: str or dict A string or dict representing the coordinate reference system on the "destination" or "to" side of the transformation. geom: obj A GeoJSON-like geometry object with 'type' and 'coordinates' members. antimeridian_cutting: bool, optional ``True`` to cut output geometries in two at the antimeridian, the default is ``False`. antimeridian_offset: float, optional A distance in decimal degrees from the antimeridian, outside of which geometries will not be cut. precision: int, optional Optional rounding precision of output coordinates, in number of decimal places. Returns ------- obj A new GeoJSON-like geometry with transformed coordinates. Note that if the output is at the antimeridian, it may be cut and of a different geometry ``type`` than the input, e.g., a polygon input may result in multi-polygon output. Examples -------- >>> transform_geom( ... 'EPSG:4326', 'EPSG:26953', ... 
{'type': 'Point', 'coordinates': [-105.0, 40.0]}) {'type': 'Point', 'coordinates': (957097.0952383667, 378940.8419189212)} """ # Function is implemented in the _transform C extension module. return _transform_geom( src_crs, dst_crs, geom, antimeridian_cutting, antimeridian_offset, precision) Fiona-1.8.21/fiona/vfs.py000066400000000000000000000050101420023252700150740ustar00rootroot00000000000000"""Implementation of Apache VFS schemes and URLs.""" import os import sys import re from fiona.compat import urlparse # Supported URI schemes and their mapping to GDAL's VSI suffix. # TODO: extend for other cloud plaforms. SCHEMES = { 'ftp': 'curl', 'gzip': 'gzip', 'http': 'curl', 'https': 'curl', 's3': 's3', 'tar': 'tar', 'zip': 'zip', 'gs': 'gs', } CURLSCHEMES = set([k for k, v in SCHEMES.items() if v == 'curl']) # TODO: extend for other cloud plaforms. REMOTESCHEMES = set([k for k, v in SCHEMES.items() if v in ('curl', 's3', 'gs')]) def valid_vsi(vsi): """Ensures all parts of our vsi path are valid schemes.""" return all(p in SCHEMES for p in vsi.split('+')) def is_remote(scheme): if scheme is None: return False return any(p in REMOTESCHEMES for p in scheme.split('+')) def vsi_path(path, vsi=None, archive=None): # If a VSI and archive file are specified, we convert the path to # an OGR VSI path (see cpl_vsi.h). if vsi: prefix = '/'.join('vsi{0}'.format(SCHEMES[p]) for p in vsi.split('+')) if archive: result = '/{0}/{1}{2}'.format(prefix, archive, path) else: result = '/{0}/{1}'.format(prefix, path) else: result = path return result def parse_paths(uri, vfs=None): """Parse a URI or Apache VFS URL into its parts Returns: tuple (path, scheme, archive) """ archive = scheme = None path = uri # Windows drive letters (e.g. "C:\") confuse `urlparse` as they look like # URL schemes if sys.platform == "win32" and re.match("^[a-zA-Z]\\:", path): return path, None, None if vfs: parts = urlparse(vfs) scheme = parts.scheme archive = parts.path if parts.netloc and parts.netloc != 'localhost': archive = parts.netloc + archive else: parts = urlparse(path) scheme = parts.scheme path = parts.path if parts.netloc and parts.netloc != 'localhost': if scheme.split("+")[-1] in CURLSCHEMES: # We need to deal with cases such as zip+https://server.com/data.zip path = "{}://{}{}".format(scheme.split("+")[-1], parts.netloc, path) else: path = parts.netloc + path if scheme in SCHEMES: parts = path.split('!') path = parts.pop() if parts else None archive = parts.pop() if parts else None scheme = None if not scheme else scheme return path, scheme, archive Fiona-1.8.21/pep-508-install000077500000000000000000000016051420023252700153260ustar00rootroot00000000000000#!/usr/bin/env python """Prototype support for PEP 518: "Specifying Minimum Build System Requirements for Python Projects". A future version of pip will do this for us and we'll remove this script. This script installs Fiona in develop mode (``pip install -e .[test]``). """ import subprocess def main(): # Parse config file for build system requirements. build_system_requirements = None with open('pyproject.toml') as config: for line in config: if line.startswith('requires'): build_system_requirements = line.split('=')[-1] # Install them if found. if build_system_requirements: reqs = eval(build_system_requirements) subprocess.call(['pip', 'install'] + reqs) # Now install our package in editable mode. 
subprocess.call(['pip', 'install', '-e', '.[test]']) if __name__ == '__main__': main() Fiona-1.8.21/pyproject.toml000066400000000000000000000001621420023252700155470ustar00rootroot00000000000000[build-system] requires = [ "cython==0.29.24", "oldest-supported-numpy", "setuptools", "wheel", ] Fiona-1.8.21/pytest.ini000066400000000000000000000010161420023252700146630ustar00rootroot00000000000000[pytest] filterwarnings = ignore:.*Sequential read of iterator was interrupted*:RuntimeWarning ignore:.*negative slices or start values other than zero may be slow*:RuntimeWarning ignore:.*negative step size may be slow*:RuntimeWarning ignore:.*is buggy and will be removed in Fiona 2.0.* markers = iconv: marks tests that require gdal to be compiled with iconv network: marks tests that require a network connection wheel: marks test that only works when installed from wheel testpaths = tests Fiona-1.8.21/readthedocs.yml000066400000000000000000000001101420023252700156360ustar00rootroot00000000000000python: version: 3 pip_install: true conda: file: environment.yml Fiona-1.8.21/requirements-ci.txt000066400000000000000000000000121420023252700165020ustar00rootroot00000000000000coveralls Fiona-1.8.21/requirements-dev.txt000066400000000000000000000002571420023252700166770ustar00rootroot00000000000000-r requirements.txt coverage==4.5.4 cython==0.29.24 mock ; python_version < '3.3' pytest==6.2.5 pytest-cov==2.8.1 setuptools==41.6.0 boto3==1.20.10 wheel==0.33.6 pytz==2020.1 Fiona-1.8.21/requirements.txt000066400000000000000000000001701420023252700161160ustar00rootroot00000000000000attrs==21.2.0 click-plugins==1.0.4 cligj==0.5.0 munch==2.3.2 six==1.16.0 enum34==1.1.6 ; python_version < '3.4' certifi Fiona-1.8.21/scripts/000077500000000000000000000000001420023252700143235ustar00rootroot00000000000000Fiona-1.8.21/scripts/check_deprecated.py000066400000000000000000000031321420023252700201310ustar00rootroot00000000000000import glob import os from collections import defaultdict import re ignored_files = {'_shim.pyx', '_shim1.pyx', '_shim1.pxd', 'ogrext1.pxd'} # List of deprecated methods from https://gdal.org/doxygen/deprecated.html#_deprecated000028 deprecated = { 'CPL_LSBINT16PTR', 'CPL_LSBINT32PTR(x)', 'OGR_Dr_CopyDataSource', 'OGR_Dr_CreateDataSource', 'OGR_Dr_DeleteDataSource', 'OGR_Dr_Open', 'OGR_Dr_TestCapability', 'OGR_DS_CopyLayer', 'OGR_DS_CreateLayer', 'OGR_DS_DeleteLayer', 'OGR_DS_Destroy', 'OGR_DS_ExecuteSQL', 'OGR_DS_GetDriver', 'OGR_DS_GetLayer', 'OGR_DS_GetLayerByName', 'OGR_DS_GetLayerCount', 'OGR_DS_GetName', 'OGR_DS_ReleaseResultSet', 'OGR_DS_TestCapability', 'OGR_G_GetCoordinateDimension', 'OGR_G_SetCoordinateDimension', 'OGRGetDriver', 'OGRGetDriverByName', 'OGRGetDriverCount', 'OGROpen', 'OGROpenShared', 'OGRRegisterAll', 'OGRReleaseDataSource', } found_lines = defaultdict(list) files = glob.glob('fiona/*.pyx') + glob.glob('fiona/*.pxd') for path in files: if os.path.basename(path) in ignored_files: continue with open(path, 'r') as f: for i, line in enumerate(f): for deprecated_method in deprecated: match = re.search(r'{}\s*\('.format(deprecated_method), line) if match: found_lines[path].append((i+1, line.strip(), deprecated_method)) for path in sorted(found_lines): print(path) for line_nr, line, method in found_lines[path]: print("\t{}\t{}".format(line_nr, line)) print("") Fiona-1.8.21/scripts/check_urls.py000066400000000000000000000022661420023252700170250ustar00rootroot00000000000000import requests import glob import re def test_urls(files): headers = {'User-Agent': 'Mozilla/5.0
(compatible; MSIE 6.0; Fiona CI check)'} for fpath in files: print("Processing: {}".format(fpath)) with open(fpath) as f: text = f.read() urls = re.findall('(https?:\/\/[^\s`>\'"()]+)', text) for url in urls: http_code = None try: r = requests.get(url, headers=headers) http_code = r.status_code warn = '' if not http_code == 200: warn = ' <--- !!!' except Exception as e: warn = str(e) if len(warn) > 0: print("\t {url} HTTP code: {http} {warn}".format(url=url, http=http_code, warn=warn) ) print("Test URLs in documentation") test_urls(glob.glob('**/*.rst', recursive=True)) print('') print('Test URLs in code') test_urls(glob.glob('fiona/**/*.py', recursive=True)) Fiona-1.8.21/scripts/dumpgj000066400000000000000000000043121420023252700155340ustar00rootroot00000000000000#!/usr/bin/env python import logging import sys from fiona.tool import main if __name__ == '__main__': import argparse logging.basicConfig(stream=sys.stderr, level=logging.INFO) logger = logging.getLogger('fiona.tool') parser = argparse.ArgumentParser( description="Serialize a file's records or description to GeoJSON") parser.add_argument('infile', help="input file name") parser.add_argument('outfile', nargs='?', help="output file name, defaults to stdout if omitted", default=sys.stdout) parser.add_argument('-d', '--description', action='store_true', help="serialize file's data description (schema) only") parser.add_argument('-n', '--indent', type=int, default=None, metavar='N', help="indentation level in N number of chars") parser.add_argument('--compact', action='store_true', help="use compact separators (',', ':')") parser.add_argument('--encoding', default=None, metavar='ENC', help="Specify encoding of the input file") parser.add_argument('--record-buffered', dest='record_buffered', action='store_true', help="Economical buffering of writes at record, not collection (default), level") parser.add_argument('--ignore-errors', dest='ignore_errors', action='store_true', help="log errors but do not stop serialization") parser.add_argument('--use-ld-context', dest='use_ld_context', action='store_true', help="add a JSON-LD context to JSON output") parser.add_argument('--add-ld-context-item', dest='ld_context_items', action='append', metavar='TERM=URI', help="map a term to a URI and add it to the output's JSON LD context") args = parser.parse_args() # Keyword args to be used in all following json.dump* calls. dump_kw = {'sort_keys': True} if args.indent: dump_kw['indent'] = args.indent if args.compact: dump_kw['separators'] = (',', ':') item_sep = args.compact and ',' or ', ' ignore_errors = args.ignore_errors sys.exit(main(args, dump_kw, item_sep, ignore_errors)) Fiona-1.8.21/scripts/fiona.insp000066400000000000000000000006661420023252700163220ustar00rootroot00000000000000#!/usr/bin/env python import sys from fiona.inspector import main if __name__ == '__main__': import argparse parser = argparse.ArgumentParser( prog="fiona.insp", description="Open a data file and drop into an interactive interpreter") parser.add_argument( 'src', metavar='FILE', help="Input dataset file name") args = parser.parse_args() sys.exit(main(args.src)) Fiona-1.8.21/scripts/travis_filegdb_install.sh000066400000000000000000000007541420023252700213770ustar00rootroot00000000000000#!/bin/bash # Install filegdb if not already installed if [ ! 
-d "$FILEGDB" ]; then mkdir -p $FILEGDB cd $FILEGDB wget -q https://github.com/Esri/file-geodatabase-api/raw/master/FileGDB_API_1.5.1/FileGDB_API_1_5_1-64gcc51.tar.gz tar -xzf FileGDB_API_1_5_1-64gcc51.tar.gz --strip=1 FileGDB_API-64gcc51 rm FileGDB_API_1_5_1-64gcc51.tar.gz rm -rf samples rm -rf doc fi export LD_LIBRARY_PATH=$FILEGDB/lib:$LD_LIBRARY_PATH # change back to travis build dir cd $TRAVIS_BUILD_DIR Fiona-1.8.21/scripts/travis_gdal_install.sh000077500000000000000000000073531420023252700207170ustar00rootroot00000000000000#!/bin/bash # # originally contributed by @rbuffat to Toblerity/Fiona set -e GDALOPTS=" --with-ogr \ --with-geos \ --with-expat \ --without-libtool \ --with-libz=internal \ --with-libtiff=internal \ --with-geotiff=internal \ --without-gif \ --without-pg \ --without-grass \ --without-libgrass \ --without-cfitsio \ --without-pcraster \ --with-netcdf \ --with-png=internal \ --with-jpeg=internal \ --without-gif \ --without-ogdi \ --without-fme \ --without-hdf4 \ --without-hdf5 \ --without-jasper \ --without-ecw \ --without-kakadu \ --without-mrsid \ --without-jp2mrsid \ --without-bsb \ --without-grib \ --without-mysql \ --without-ingres \ --without-xerces \ --without-odbc \ --with-curl \ --with-sqlite3 \ --without-idb \ --without-sde \ --without-ruby \ --without-perl \ --without-php \ --without-python \ --with-oci=no \ --without-mrf \ --with-webp=no" if [ -d "$FILEGDB" ]; then GDALOPTS="$GDALOPTS --with-fgdb=$FILEGDB" fi # Create build dir if not exists if [ ! -d "$GDALBUILD" ]; then mkdir $GDALBUILD; fi if [ ! -d "$GDALINST" ]; then mkdir $GDALINST; fi ls -l $GDALINST if [ "$GDALVERSION" = "master" ]; then PROJOPT="--with-proj=$GDALINST/gdal-$GDALVERSION" cd $GDALBUILD git clone --depth 1 https://github.com/OSGeo/gdal gdal-$GDALVERSION cd gdal-$GDALVERSION/gdal echo $PROJVERSION > newproj.txt git rev-parse HEAD > newrev.txt BUILD=no # Only build if nothing cached or if the GDAL revision changed if test ! -f $GDALINST/gdal-$GDALVERSION/rev.txt; then BUILD=yes elif ( ! diff newrev.txt $GDALINST/gdal-$GDALVERSION/rev.txt >/dev/null ) || ( ! diff newproj.txt $GDALINST/gdal-$GDALVERSION/newproj.txt >/dev/null ); then BUILD=yes fi if test "$BUILD" = "yes"; then mkdir -p $GDALINST/gdal-$GDALVERSION cp newrev.txt $GDALINST/gdal-$GDALVERSION/rev.txt cp newproj.txt $GDALINST/gdal-$GDALVERSION/newproj.txt ./configure --prefix=$GDALINST/gdal-$GDALVERSION $GDALOPTS $PROJOPT make make install fi else case "$GDALVERSION" in 3*) PROJOPT="--with-proj=$GDALINST/gdal-$GDALVERSION" ;; 2.4*) PROJOPT="--with-proj=$GDALINST/gdal-$GDALVERSION" ;; 2.3*) PROJOPT="--with-proj=$GDALINST/gdal-$GDALVERSION" ;; 2.2*) PROJOPT="--with-static-proj4=$GDALINST/gdal-$GDALVERSION" ;; 2.1*) PROJOPT="--with-static-proj4=$GDALINST/gdal-$GDALVERSION" ;; 2.0*) PROJOPT="--with-static-proj4=$GDALINST/gdal-$GDALVERSION" ;; 1*) PROJOPT="--with-static-proj4=$GDALINST/gdal-$GDALVERSION" ;; *) PROJOPT="--with-proj=$GDALINST/gdal-$GDALVERSION" ;; esac if [ ! 
-d "$GDALINST/gdal-$GDALVERSION/share/gdal" ]; then cd $GDALBUILD gdalver=$(expr "$GDALVERSION" : '\([0-9]*.[0-9]*.[0-9]*\)') wget -q http://download.osgeo.org/gdal/$gdalver/gdal-$GDALVERSION.tar.gz tar -xzf gdal-$GDALVERSION.tar.gz cd gdal-$gdalver ./configure --prefix=$GDALINST/gdal-$GDALVERSION $GDALOPTS $PROJOPT make make install fi fi # change back to travis build dir cd $TRAVIS_BUILD_DIR Fiona-1.8.21/scripts/travis_proj_install.sh000066400000000000000000000011551420023252700207510ustar00rootroot00000000000000#!/bin/sh set -e # Create build dir if not exists if [ ! -d "$PROJBUILD" ]; then mkdir $PROJBUILD; fi if [ ! -d "$PROJINST" ]; then mkdir $PROJINST; fi ls -l $PROJINST echo "PROJ VERSION: $PROJVERSION" if [ ! -d "$PROJINST/gdal-$GDALVERSION/share/proj" ]; then cd $PROJBUILD wget -q https://download.osgeo.org/proj/proj-$PROJVERSION.tar.gz tar -xzf proj-$PROJVERSION.tar.gz projver=$(expr "$PROJVERSION" : '\([0-9]*.[0-9]*.[0-9]*\)') cd proj-$projver ./configure --prefix=$PROJINST/gdal-$GDALVERSION make -s make install fi # change back to travis build dir cd $TRAVIS_BUILD_DIR Fiona-1.8.21/setup.py000066400000000000000000000316431420023252700143550ustar00rootroot00000000000000from distutils.command.sdist import sdist from distutils import log import itertools as it import os import shutil import subprocess import sys from setuptools import setup from setuptools.extension import Extension # Use Cython if available. try: from Cython.Build import cythonize except ImportError: cythonize = None def check_output(cmd): # since subprocess.check_output doesn't exist in 2.6 # we wrap it here. try: out = subprocess.check_output(cmd) return out.decode('utf') except AttributeError: # For some reasone check_output doesn't exist # So fall back on Popen p = subprocess.Popen(cmd, stdout=subprocess.PIPE) out, err = p.communicate() return out def copy_data_tree(datadir, destdir): try: shutil.rmtree(destdir) except OSError: pass shutil.copytree(datadir, destdir) # Parse the version from the fiona module. with open('fiona/__init__.py', 'r') as f: for line in f: if line.find("__version__") >= 0: version = line.split("=")[1].strip() version = version.strip('"') version = version.strip("'") break # Fiona's auxiliary files are UTF-8 encoded and we'll specify this when # reading with Python 3+ open_kwds = {} if sys.version_info > (3,): open_kwds['encoding'] = 'utf-8' with open('VERSION.txt', 'w', **open_kwds) as f: f.write(version) with open('README.rst', **open_kwds) as f: readme = f.read() with open('CREDITS.txt', **open_kwds) as f: credits = f.read() with open('CHANGES.txt', **open_kwds) as f: changes = f.read() # Set a flag for builds where the source directory is a repo checkout. source_is_repo = os.path.exists("MANIFEST.in") # Extend distutil's sdist command to generate C extension sources from # the _shim extension modules for GDAL 1.x and 2.x. class sdist_multi_gdal(sdist): def run(self): sources = { "_shim1": "_shim", "_shim2": "_shim", "_shim22": "_shim", "_shim3": "_shim" } for src_a, src_b in sources.items(): shutil.copy('fiona/{}.pyx'.format(src_a), 'fiona/{}.pyx'.format(src_b)) _ = check_output(['cython', '-v', '-f', 'fiona/{}.pyx'.format(src_b), '-o', 'fiona/{}.c'.format(src_a)]) print(_) sdist.run(self) # Building Fiona requires options that can be obtained from GDAL's gdal-config # program or can be specified using setup arguments. The latter override the # former. # # A GDAL API version is strictly required. 
Without this the setup script # cannot know whether to use the GDAL version 1 or 2 source files. The GDAL # API version can be specified in 2 ways. # # 1. By the gdal-config program, optionally pointed to by GDAL_CONFIG # 2. By a GDAL_VERSION environment variable. This overrides number 1. include_dirs = [] library_dirs = [] libraries = [] extra_link_args = [] gdal_output = [None for i in range(4)] gdalversion = None language = None if 'clean' not in sys.argv: try: gdal_config = os.environ.get('GDAL_CONFIG', 'gdal-config') for i, flag in enumerate( ["--cflags", "--libs", "--datadir", "--version"]): gdal_output[i] = check_output([gdal_config, flag]).strip() for item in gdal_output[0].split(): if item.startswith("-I"): include_dirs.extend(item[2:].split(":")) for item in gdal_output[1].split(): if item.startswith("-L"): library_dirs.extend(item[2:].split(":")) elif item.startswith("-l"): libraries.append(item[2:]) else: # e.g. -framework GDAL extra_link_args.append(item) gdalversion = gdal_output[3] if gdalversion: log.info("GDAL API version obtained from gdal-config: %s", gdalversion) except Exception as e: if os.name == "nt": log.info("Building on Windows requires extra options to setup.py " "to locate needed GDAL files.\nMore information is " "available in the README.") else: log.warn("Failed to get options via gdal-config: %s", str(e)) # Get GDAL API version from environment variable. if 'GDAL_VERSION' in os.environ: gdalversion = os.environ['GDAL_VERSION'] log.info("GDAL API version obtained from environment: %s", gdalversion) # Get GDAL API version from the command line if specified there. if '--gdalversion' in sys.argv: index = sys.argv.index('--gdalversion') sys.argv.pop(index) gdalversion = sys.argv.pop(index) log.info("GDAL API version obtained from command line option: %s", gdalversion) if not gdalversion: log.fatal("A GDAL API version must be specified. Provide a path " "to gdal-config using a GDAL_CONFIG environment variable " "or use a GDAL_VERSION environment variable.") sys.exit(1) if os.environ.get('PACKAGE_DATA'): destdir = 'fiona/gdal_data' if gdal_output[2]: log.info("Copying gdal data from %s" % gdal_output[2]) copy_data_tree(gdal_output[2], destdir) else: # check to see if GDAL_DATA is defined gdal_data = os.environ.get('GDAL_DATA', None) if gdal_data: log.info("Copying gdal data from %s" % gdal_data) copy_data_tree(gdal_data, destdir) # Conditionally copy PROJ.4 data. projdatadir = os.environ.get('PROJ_LIB', '/usr/local/share/proj') if os.path.exists(projdatadir): log.info("Copying proj data from %s" % projdatadir) copy_data_tree(projdatadir, 'fiona/proj_data') if "--cython-language" in sys.argv: index = sys.argv.index("--cython-language") sys.argv.pop(index) language = sys.argv.pop(index).lower() gdal_version_parts = gdalversion.split('.') gdal_major_version = int(gdal_version_parts[0]) gdal_minor_version = int(gdal_version_parts[1]) log.info("GDAL version major=%r minor=%r", gdal_major_version, gdal_minor_version) ext_options = dict( include_dirs=include_dirs, library_dirs=library_dirs, libraries=libraries, extra_link_args=extra_link_args) # Enable coverage for cython pyx files. 
# Enable coverage for cython pyx files.
if os.environ.get('CYTHON_COVERAGE'):
    from Cython.Compiler.Options import get_directive_defaults
    directive_defaults = get_directive_defaults()
    directive_defaults['linetrace'] = True
    directive_defaults['binding'] = True

    ext_options.update(dict(
        define_macros=[("CYTHON_TRACE_NOGIL", "1")]))

# GDAL 2.3+ requires C++11
if language == "c++":
    ext_options["language"] = "c++"
    if sys.platform != "win32":
        ext_options["extra_compile_args"] = ["-std=c++11"]

ext_options_cpp = ext_options.copy()
if sys.platform != "win32":
    ext_options_cpp["extra_compile_args"] = ["-std=c++11"]

# Define the extension modules.
ext_modules = []

if source_is_repo and "clean" not in sys.argv:
    # When building from a repo, Cython is required.
    log.info("MANIFEST.in found, presume a repo, cythonizing...")
    if not cythonize:
        log.fatal("Cython.Build.cythonize not found. "
                  "Cython is required to build from a repo.")
        sys.exit(1)

    if gdalversion.startswith("1"):
        shutil.copy('fiona/_shim1.pyx', 'fiona/_shim.pyx')
        shutil.copy('fiona/_shim1.pxd', 'fiona/_shim.pxd')
    elif gdal_major_version == 2:
        if gdal_minor_version >= 2:
            log.info("Building Fiona for gdal 2.2+: {0}".format(gdalversion))
            shutil.copy('fiona/_shim22.pyx', 'fiona/_shim.pyx')
            shutil.copy('fiona/_shim22.pxd', 'fiona/_shim.pxd')
        else:
            log.info("Building Fiona for gdal 2.0.x-2.1.x: {0}".format(gdalversion))
            shutil.copy('fiona/_shim2.pyx', 'fiona/_shim.pyx')
            shutil.copy('fiona/_shim2.pxd', 'fiona/_shim.pxd')
    elif gdal_major_version == 3:
        shutil.copy('fiona/_shim3.pyx', 'fiona/_shim.pyx')
        shutil.copy('fiona/_shim3.pxd', 'fiona/_shim.pxd')

    ext_modules = cythonize([
        Extension('fiona._geometry', ['fiona/_geometry.pyx'], **ext_options),
        Extension('fiona.schema', ['fiona/schema.pyx'], **ext_options),
        Extension('fiona._transform', ['fiona/_transform.pyx'], **ext_options_cpp),
        Extension('fiona._crs', ['fiona/_crs.pyx'], **ext_options),
        Extension('fiona._env', ['fiona/_env.pyx'], **ext_options),
        Extension('fiona._err', ['fiona/_err.pyx'], **ext_options),
        Extension('fiona._shim', ['fiona/_shim.pyx'], **ext_options),
        Extension('fiona.ogrext', ['fiona/ogrext.pyx'], **ext_options)
    ],
        compiler_directives={"language_level": "3"}
    )
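# Usage sketch (the shell commands are illustrative): line tracing is
# compiled into the extension modules only when CYTHON_COVERAGE is set at
# build time, so a coverage run of the test suite looks roughly like
#
#     CYTHON_COVERAGE=true python setup.py build_ext --inplace
#     python -m pytest --cov fiona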
elif "clean" not in sys.argv: ext_modules = [ Extension('fiona.schema', ['fiona/schema.c'], **ext_options), Extension('fiona._transform', ['fiona/_transform.cpp'], **ext_options_cpp), Extension('fiona._geometry', ['fiona/_geometry.c'], **ext_options), Extension('fiona._crs', ['fiona/_crs.c'], **ext_options), Extension('fiona._env', ['fiona/_env.c'], **ext_options), Extension('fiona._err', ['fiona/_err.c'], **ext_options), Extension('fiona.ogrext', ['fiona/ogrext.c'], **ext_options), ] if gdal_major_version == 1: log.info("Building Fiona for gdal 1.x: {0}".format(gdalversion)) ext_modules.append( Extension('fiona._shim', ['fiona/_shim1.c'], **ext_options)) elif gdal_major_version == 2: if gdal_minor_version >= 2: log.info("Building Fiona for gdal 2.2+: {0}".format(gdalversion)) ext_modules.append( Extension('fiona._shim', ['fiona/_shim22.c'], **ext_options)) else: log.info("Building Fiona for gdal 2.0.x-2.1.x: {0}".format(gdalversion)) ext_modules.append( Extension('fiona._shim', ['fiona/_shim2.c'], **ext_options)) elif gdal_major_version == 3: log.info("Building Fiona for gdal >= 3.0.x: {0}".format(gdalversion)) ext_modules.append( Extension('fiona._shim', ['fiona/_shim3.c'], **ext_options)) requirements = [ 'attrs>=17', 'certifi', 'click>=4.0', 'cligj>=0.5', 'click-plugins>=1.0', 'six>=1.7', 'munch', "setuptools", 'argparse; python_version < "2.7"', 'ordereddict; python_version < "2.7"', 'enum34; python_version < "3.4"' ] # Python 3.10 workaround as enum34 not available if sys.version_info >= (3, 10): requirements.remove('enum34; python_version < "3.4"') extras_require = { 'calc': ['shapely'], 's3': ['boto3>=1.2.4'], 'test': ['pytest>=3', 'pytest-cov', 'boto3>=1.2.4', 'mock; python_version < "3.4"'] } extras_require['all'] = list(set(it.chain(*extras_require.values()))) setup_args = dict( cmdclass={'sdist': sdist_multi_gdal}, metadata_version='1.2', name='Fiona', version=version, requires_python='>=2.6', requires_external='GDAL (>=1.8)', description="Fiona reads and writes spatial data files", license='BSD', keywords='gis vector feature data', author='Sean Gillies', author_email='sean.gillies@gmail.com', maintainer='Sean Gillies', maintainer_email='sean.gillies@gmail.com', url='http://github.com/Toblerity/Fiona', long_description=readme + "\n" + changes + "\n" + credits, package_dir={'': '.'}, packages=['fiona', 'fiona.fio'], entry_points=''' [console_scripts] fio=fiona.fio.main:main_group [fiona.fio_commands] bounds=fiona.fio.bounds:bounds calc=fiona.fio.calc:calc cat=fiona.fio.cat:cat collect=fiona.fio.collect:collect distrib=fiona.fio.distrib:distrib dump=fiona.fio.dump:dump env=fiona.fio.env:env filter=fiona.fio.filter:filter info=fiona.fio.info:info insp=fiona.fio.insp:insp load=fiona.fio.load:load ls=fiona.fio.ls:ls rm=fiona.fio.rm:rm ''', install_requires=requirements, extras_require=extras_require, ext_modules=ext_modules, classifiers=[ 'Development Status :: 5 - Production/Stable', 'Intended Audience :: Developers', 'Intended Audience :: Science/Research', 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 3.6', 'Programming Language :: Python :: 3.7', 'Programming Language :: Python :: 3.8', 'Programming Language :: Python :: 3.9', 'Programming Language :: Python :: 3.10', 'Topic :: Scientific/Engineering :: GIS']) if os.environ.get('PACKAGE_DATA'): setup_args['package_data'] = {'fiona': ['gdal_data/*', 'proj_data/*', '.libs/*', '.libs/licenses/*']} setup(**setup_args) 
Fiona-1.8.21/tests/000077500000000000000000000000001420023252700137765ustar00rootroot00000000000000
Fiona-1.8.21/tests/__init__.py000066400000000000000000000006351420023252700161130ustar00rootroot00000000000000
"""Do not delete! At least one of the ``unittest.TestCase()`` based tests
does a relative import inside the ``tests`` directory to use another test
as a base class. This file can probably be deleted if that condition is
removed. For example:

    $ git grep 'from \.' | grep test
    tests/test_layer.py:from .test_collection import TestReading
    tests/test_vfs.py:from .test_collection import TestReading

"""
Fiona-1.8.21/tests/conftest.py000066400000000000000000000404711420023252700162030ustar00rootroot00000000000000
"""pytest fixtures and automatic test data generation."""

import copy
import json
import os.path
import shutil
import tarfile
import zipfile
from collections import OrderedDict

from click.testing import CliRunner
import pytest

import fiona
from fiona.crs import from_epsg
from fiona.env import GDALVersion

driver_extensions = {'DXF': 'dxf',
                     'CSV': 'csv',
                     'ESRI Shapefile': 'shp',
                     'FileGDB': 'gdb',
                     'GML': 'gml',
                     'GPX': 'gpx',
                     'GPSTrackMaker': 'gtm',
                     'MapInfo File': 'tab',
                     'DGN': 'dgn',
                     'GPKG': 'gpkg',
                     'GeoJSON': 'json',
                     'GeoJSONSeq': 'geojsons',
                     'GMT': 'gmt',
                     'OGR_GMT': 'gmt',
                     'BNA': 'bna',
                     'FlatGeobuf': 'fgb'}


def pytest_report_header(config):
    headers = []
    # gdal version number
    gdal_release_name = fiona.get_gdal_release_name()
    headers.append('GDAL: {} ({})'.format(gdal_release_name,
                                          fiona.get_gdal_version_num()))
    supported_drivers = ", ".join(sorted(list(
        fiona.drvsupport.supported_drivers.keys())))
    # supported drivers
    headers.append("Supported drivers: {}".format(supported_drivers))
    return '\n'.join(headers)


def get_temp_filename(driver):
    basename = "foo"
    extension = driver_extensions.get(driver, "bar")
    prefix = ""
    if driver == 'GeoJSONSeq':
        prefix = "GeoJSONSeq:"

    return "{prefix}{basename}.{extension}".format(prefix=prefix,
                                                   basename=basename,
                                                   extension=extension)


_COUTWILDRNP_FILES = [
    'coutwildrnp.shp', 'coutwildrnp.shx', 'coutwildrnp.dbf',
    'coutwildrnp.prj']


def _read_file(name):
    with open(os.path.join(os.path.dirname(__file__), name)) as f:
        return f.read()


has_gpkg = "GPKG" in fiona.drvsupport.supported_drivers.keys()
has_gpkg_reason = "Requires geopackage driver"
requires_gpkg = pytest.mark.skipif(not has_gpkg, reason=has_gpkg_reason)


@pytest.fixture(scope='function')
def gdalenv(request):
    import fiona.env

    def fin():
        if fiona.env.local._env:
            fiona.env.delenv()
            fiona.env.local._env = None
    request.addfinalizer(fin)


@pytest.fixture(scope='session')
def data_dir():
    """Absolute file path to the directory containing test datasets."""
    return os.path.abspath(os.path.join(os.path.dirname(__file__), 'data'))


@pytest.fixture(scope='function')
def data(tmpdir, data_dir):
    """A temporary directory containing a copy of the files in data."""
    for filename in _COUTWILDRNP_FILES:
        shutil.copy(os.path.join(data_dir, filename), str(tmpdir))
    return tmpdir


@pytest.fixture(scope='session')
def path_curves_line_csv(data_dir):
    """Path to ``curves_line.csv``"""
    return os.path.join(data_dir, 'curves_line.csv')


@pytest.fixture(scope='session')
def path_test_tin_shp(data_dir):
    """Path to ``test_tin.shp``"""
    return os.path.join(data_dir, 'test_tin.shp')


@pytest.fixture(scope='session')
def path_test_tin_csv(data_dir):
    """Path to ``test_tin.csv``"""
    return os.path.join(data_dir, 'test_tin.csv')
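# Hedged usage sketch (not itself a fixture or test in this suite): a test
# that asks for the ``data`` fixture above receives a py.path.local
# temporary directory holding a disposable copy of the coutwildrnp
# shapefile, which it may freely modify.
def _example_consuming_data_fixture(data):
    with fiona.open(str(data.join('coutwildrnp.shp'))) as src:
        assert len(src) > 0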
@pytest.fixture(scope='session')
def path_coutwildrnp_shp(data_dir):
    """Path to ``coutwildrnp.shp``"""
    return os.path.join(data_dir, 'coutwildrnp.shp')


@pytest.fixture(scope='session')
def path_coutwildrnp_zip(data_dir):
    """Creates ``coutwildrnp.zip`` if it does not exist and returns
    the absolute file path."""
    path = os.path.join(data_dir, 'coutwildrnp.zip')
    if not os.path.exists(path):
        with zipfile.ZipFile(path, 'w') as zip:
            for filename in _COUTWILDRNP_FILES:
                zip.write(os.path.join(data_dir, filename), filename)
    return path


@pytest.fixture(scope='session')
def path_grenada_geojson(data_dir):
    """Path to ``grenada.geojson``"""
    return os.path.join(data_dir, 'grenada.geojson')


@pytest.fixture(scope='session')
def bytes_coutwildrnp_zip(path_coutwildrnp_zip):
    """The zip file's bytes"""
    with open(path_coutwildrnp_zip, 'rb') as src:
        return src.read()


@pytest.fixture(scope='session')
def path_coutwildrnp_tar(data_dir):
    """Creates ``coutwildrnp.tar`` if it does not exist and returns
    the absolute file path."""
    path = os.path.join(data_dir, 'coutwildrnp.tar')
    if not os.path.exists(path):
        with tarfile.open(path, 'w') as tar:
            for filename in _COUTWILDRNP_FILES:
                tar.add(
                    os.path.join(data_dir, filename),
                    arcname=os.path.join('testing', filename))
    return path


@pytest.fixture(scope='session')
def path_coutwildrnp_json(data_dir):
    """Creates ``coutwildrnp.json`` if it does not exist and returns
    the absolute file path."""
    path = os.path.join(data_dir, 'coutwildrnp.json')
    if not os.path.exists(path):
        name = _COUTWILDRNP_FILES[0]
        with fiona.open(os.path.join(data_dir, name), 'r') as source:
            features = [feat for feat in source]
        my_layer = {
            'type': 'FeatureCollection',
            'features': features}
        with open(path, 'w') as f:
            f.write(json.dumps(my_layer))
    return path


@pytest.fixture(scope='session')
def bytes_grenada_geojson(path_grenada_geojson):
    """The geojson as bytes."""
    with open(path_grenada_geojson, 'rb') as src:
        return src.read()


@pytest.fixture(scope='session')
def path_coutwildrnp_gpkg(data_dir):
    """Creates ``coutwildrnp.gpkg`` if it does not exist and returns
    the absolute file path."""
    if not has_gpkg:
        raise RuntimeError("GDAL has not been compiled with GPKG support")
    path = os.path.join(data_dir, 'coutwildrnp.gpkg')
    if not os.path.exists(path):
        filename_shp = _COUTWILDRNP_FILES[0]
        path_shp = os.path.join(data_dir, filename_shp)
        with fiona.open(path_shp, "r") as src:
            meta = copy.deepcopy(src.meta)
            meta["driver"] = "GPKG"
            with fiona.open(path, "w", **meta) as dst:
                dst.writerecords(src)
    return path


@pytest.fixture(scope='session')
def path_gpx(data_dir):
    return os.path.join(data_dir, 'test_gpx.gpx')


@pytest.fixture(scope='session')
def feature_collection():
    """GeoJSON feature collection on a single line."""
    return _read_file(os.path.join('data', 'collection.txt'))


@pytest.fixture(scope='session')
def feature_collection_pp():
    """Same as above but with pretty-print styling applied."""
    return _read_file(os.path.join('data', 'collection-pp.txt'))


@pytest.fixture(scope='session')
def feature_seq():
    """One feature per line."""
    return _read_file(os.path.join('data', 'sequence.txt'))


@pytest.fixture(scope='session')
def feature_seq_pp_rs():
    """Same as above but each feature has pretty-print styling"""
    return _read_file(os.path.join('data', 'sequence-pp.txt'))


@pytest.fixture(scope='session')
def runner():
    """Returns a ``click.testing.CliRunner()`` instance."""
    return CliRunner()
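# Hedged usage sketch (not itself a test): exercising the ``fio`` CLI
# in-process through the ``runner`` fixture above. ``main_group`` is the
# console-script entry point declared in setup.py.
def _example_consuming_runner_fixture(runner, path_coutwildrnp_shp):
    from fiona.fio.main import main_group
    result = runner.invoke(main_group, ['info', path_coutwildrnp_shp])
    assert result.exit_code == 0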
@pytest.fixture(scope='class')
def uttc_path_coutwildrnp_zip(path_coutwildrnp_zip, request):
    """Make the ``path_coutwildrnp_zip`` fixture work with a
    ``unittest.TestCase()``. ``uttc`` stands for unittest test case."""
    request.cls.path_coutwildrnp_zip = path_coutwildrnp_zip


@pytest.fixture(scope='class')
def uttc_path_coutwildrnp_tar(path_coutwildrnp_tar, request):
    """Make the ``path_coutwildrnp_tar`` fixture work with a
    ``unittest.TestCase()``. ``uttc`` stands for unittest test case."""
    request.cls.path_coutwildrnp_tar = path_coutwildrnp_tar


@pytest.fixture(scope='class')
def uttc_path_coutwildrnp_json(path_coutwildrnp_json, request):
    """Make the ``path_coutwildrnp_json`` fixture work with a
    ``unittest.TestCase()``. ``uttc`` stands for unittest test case."""
    request.cls.path_coutwildrnp_json = path_coutwildrnp_json


@pytest.fixture(scope='class')
def uttc_data_dir(data_dir, request):
    """Make the ``data_dir`` fixture work with a ``unittest.TestCase()``.
    ``uttc`` stands for unittest test case."""
    request.cls.data_dir = data_dir


@pytest.fixture(scope='class')
def uttc_path_gpx(path_gpx, request):
    """Make the ``path_gpx`` fixture work with a ``unittest.TestCase()``.
    ``uttc`` stands for unittest test case."""
    request.cls.path_gpx = path_gpx


# GDAL 2.3.x silently converts ESRI WKT to OGC WKT.
# The regular expression below will match against either.
WGS84PATTERN = 'GEOGCS\["(?:GCS_WGS_1984|WGS 84)",DATUM\["WGS_1984",SPHEROID\["WGS[_ ]84"'

# Define helpers to skip tests based on GDAL version
gdal_version = GDALVersion.runtime()

requires_only_gdal1 = pytest.mark.skipif(
    gdal_version.major != 1,
    reason="Only relevant for GDAL 1.x")

requires_gdal2 = pytest.mark.skipif(
    not gdal_version.major >= 2,
    reason="Requires GDAL 2.x")

requires_gdal21 = pytest.mark.skipif(
    not gdal_version.at_least('2.1'),
    reason="Requires GDAL 2.1.x")

requires_gdal22 = pytest.mark.skipif(
    not gdal_version.at_least('2.2'),
    reason="Requires GDAL 2.2.x")

requires_gdal24 = pytest.mark.skipif(
    not gdal_version.at_least('2.4'),
    reason="Requires GDAL 2.4.x")

requires_gdal_lt_3 = pytest.mark.skipif(
    not gdal_version.major < 3,
    reason="Requires GDAL < 3")

requires_gdal3 = pytest.mark.skipif(
    not gdal_version.major >= 3,
    reason="Requires GDAL 3.x")

travis_only = pytest.mark.skipif(
    not os.getenv("TRAVIS", "false") == "true",
    reason="Requires travis CI environment"
)


@pytest.fixture(scope="class")
def unittest_data_dir(data_dir, request):
    """Makes data_dir available to unittest tests"""
    request.cls.data_dir = data_dir


@pytest.fixture(scope="class")
def unittest_path_coutwildrnp_shp(path_coutwildrnp_shp, request):
    """Makes shapefile path available to unittest tests"""
    request.cls.path_coutwildrnp_shp = path_coutwildrnp_shp


@pytest.fixture()
def testdata_generator():
    """Helper function to create test data sets for, ideally, all
    supported drivers.
    """
    def get_schema(driver):
        special_schemas = {
            'CSV': {'geometry': None,
                    'properties': OrderedDict([('position', 'int')])},
            'BNA': {'geometry': 'Point', 'properties': {}},
            'DXF': {'properties': OrderedDict(
                [('Layer', 'str'),
                 ('SubClasses', 'str'),
                 ('Linetype', 'str'),
                 ('EntityHandle', 'str'),
                 ('Text', 'str')]),
                'geometry': 'Point'},
            'GPX': {'geometry': 'Point',
                    'properties': OrderedDict([('ele', 'float'),
                                               ('time', 'datetime')])},
            'GPSTrackMaker': {'properties': OrderedDict([]),
                              'geometry': 'Point'},
            'DGN': {'properties': OrderedDict([]), 'geometry': 'LineString'},
            'MapInfo File': {'geometry': 'Point',
                             'properties': OrderedDict([('position', 'str')])}
        }
        return special_schemas.get(
            driver,
            {'geometry': 'Point',
             'properties': OrderedDict([('position', 'int')])})

    def get_crs(driver):
        special_crs = {'MapInfo File': from_epsg(4326)}
        return special_crs.get(driver, None)
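    # Illustrative note: each nested helper here maps a driver name to that
    # driver's flavour of the shared test data; drivers without a special
    # entry fall back to generic point records carrying a 'position' field.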
    def get_records(driver, range):
        special_records1 = {
            'CSV': [{'geometry': None, 'properties': {'position': i}}
                    for i in range],
            'BNA': [{'geometry': {'type': 'Point',
                                  'coordinates': (0.0, float(i))},
                     'properties': {}} for i in range],
            'DXF': [
                {'geometry': {'type': 'Point',
                              'coordinates': (0.0, float(i))},
                 'properties': OrderedDict(
                     [('Layer', '0'),
                      ('SubClasses', 'AcDbEntity:AcDbPoint'),
                      ('Linetype', None),
                      ('EntityHandle', str(i + 20000)),
                      ('Text', None)])} for i in range],
            'GPX': [{'geometry': {'type': 'Point',
                                  'coordinates': (0.0, float(i))},
                     'properties': {'ele': 0.0,
                                    'time': '2020-03-24T16:08:40+00:00'}}
                    for i in range],
            'GPSTrackMaker': [{'geometry': {'type': 'Point',
                                            'coordinates': (0.0, float(i))},
                               'properties': {}} for i in range],
            'DGN': [
                {'geometry': {'type': 'LineString',
                              'coordinates': [(float(i), 0.0), (0.0, 0.0)]},
                 'properties': {}} for i in range],
            'MapInfo File': [
                {'geometry': {'type': 'Point',
                              'coordinates': (0.0, float(i))},
                 'properties': {'position': str(i)}} for i in range],
            'PCIDSK': [{'geometry': {'type': 'Point',
                                     'coordinates': (0.0, float(i), 0.0)},
                        'properties': {'position': i}} for i in range]
        }
        return special_records1.get(driver, [
            {'geometry': {'type': 'Point', 'coordinates': (0.0, float(i))},
             'properties': {'position': i}} for i in range])

    def get_records2(driver, range):
        special_records2 = {
            'DGN': [
                {'geometry': {'type': 'LineString',
                              'coordinates': [(float(i), 0.0), (0.0, 0.0)]},
                 'properties': OrderedDict(
                     [('Type', 4),
                      ('Level', 0),
                      ('GraphicGroup', 0),
                      ('ColorIndex', 0),
                      ('Weight', 0),
                      ('Style', 0),
                      ('EntityNum', None),
                      ('MSLink', None),
                      ('Text', None)])} for i in range],
        }
        return special_records2.get(driver, get_records(driver, range))

    def get_create_kwargs(driver):
        kwargs = {
            'FlatGeobuf': {'SPATIAL_INDEX': False}
        }
        return kwargs.get(driver, {})

    def test_equal(driver, val_in, val_out):
        is_good = True
        is_good = is_good and val_in['geometry'] == val_out['geometry']
        for key in val_in['properties']:
            if key in val_out['properties']:
                if driver == 'FileGDB' and isinstance(val_in['properties'][key], int):
                    is_good = is_good and str(val_in['properties'][key]) \
                        == str(int(val_out['properties'][key]))
                else:
                    is_good = is_good and str(val_in['properties'][key]) \
                        == str(val_out['properties'][key])
            else:
                is_good = False
        return is_good
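    # For example (illustrative): get_records('CSV', range(2)) yields the
    # geometry-less records
    #     [{'geometry': None, 'properties': {'position': 0}},
    #      {'geometry': None, 'properties': {'position': 1}}]
    # matching the CSV schema returned by get_schema('CSV').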
    def _testdata_generator(driver, range1, range2):
        """Generate test data and helper methods for a specific driver.

        Each generated set of records contains the positions specified
        with range. These positions are either encoded as a field or in
        the geometry of the record, depending on the driver
        characteristics.

        Parameters
        ----------
        driver: str
            Name of driver to generate tests for
        range1: list of integer
            Range of positions for first set of records
        range2: list of integer
            Range of positions for second set of records

        Returns
        -------
        schema
            A schema for the records
        crs
            A crs for the records
        records1
            A set of records containing the positions of range1
        records2
            A set of records containing the positions of range2
        test_equal
            A function that returns True if the geometry is equal between
            the generated records and a record and if the properties of
            the generated records can be found in a record
        create_kwargs
            Driver-specific keyword arguments to use when creating the
            dataset
        """
        return get_schema(driver), get_crs(driver), \
            get_records(driver, range1), get_records2(driver, range2), \
            test_equal, get_create_kwargs(driver)

    return _testdata_generator


@pytest.fixture(scope='session')
def path_test_tz_geojson(data_dir):
    """Path to ``test_tz.geojson``"""
    return os.path.join(data_dir, 'test_tz.geojson')
Fiona-1.8.21/tests/data/000077500000000000000000000000001420023252700147075ustar00rootroot00000000000000
Fiona-1.8.21/tests/data/!test.geojson000066400000000000000000000311721420023252700173210ustar00rootroot00000000000000
{"features":[{"geometry":{"coordinates":[[[[-61.173214300000005,12.516654800000001],[-61.3827217,12.5301363],[-61.665747100000004,12.5966532],[-61.6661847,12.596],[-61.66814250000001,12.593],[-61.6700247,12.59],[-61.6718337,12.587],[-61.673571700000004,12.584],[-61.6752407,12.581],[-61.6768427,12.578],[-61.678379400000004,12.575000000000001],[-61.6803295,12.571],[-61.6830501,12.565000000000001],[-61.68553430000001,12.559000000000001],[-61.687063699999996,12.555000000000001],[-61.6884946,12.551],[-61.6898391,12.546999999999999],[-61.69209600000001,12.540999999999999],[-61.69413360000001,12.535],[-61.69595870000001,12.529],[-61.697577200000005,12.523],[-61.69899410000001,12.517],[-61.700213700000006,12.511],[-61.7012395,12.505],[-61.7020744,12.499],[-61.702626200000005,12.494],[-61.7033841,12.493],[-61.706211800000005,12.491],[-61.7089415,12.489],[-61.7141311,12.485000000000001],[-61.718995500000005,12.481],[-61.72356890000001,12.477],[-61.727879200000004,12.473],[-61.7319495,12.469000000000001],[-61.73579920000001,12.465000000000002],[-61.74032590000001,12.46],[-61.74373590000001,12.456000000000001],[-61.746971,12.452000000000002],[-61.7500412,12.447999999999999],[-61.75295580000001,12.443999999999999],[-61.753784499999995,12.443],[-61.756858300000005,12.44],[-61.7598054,12.437],[-61.762633400000006,12.434],[-61.76534870000001,12.431],[-61.767957200000005,12.427999999999999],[-61.7704641,12.425],[-61.7728741,12.422],[-61.775191500000005,12.419],[-61.7774201,12.416],[-61.7802595,12.412],[-61.782954800000006,12.408],[-61.78551270000001,12.404],[-61.7873446,12.401],[-61.789675900000006,12.397],[-61.7918847,12.393],[-61.79397550000001,12.389000000000001],[-61.794998400000004,12.388],[-61.79830060000001,12.386000000000001],[-61.8030062,12.383000000000001],[-61.8059936,12.381],[-61.810272399999995,12.378],[-61.8130009,12.376000000000001],[-61.815637599999995,12.374],[-61.8181882,12.372000000000002],[-61.82186339999999,12.369000000000002],[-61.8265048,12.365000000000002],[-61.830876599999996,12.361],[-61.8329692,12.359000000000002],[-61.835999,12.356000000000002],[-61.8413082,12.351],[-61.845319800000006,12.347],[-61.8464439,12.346],[-61.8501187,12.343],[-61.853625699999995,12.34],[-61.85697739999999,12.337],[-61.86122339999999,12.333],[-61.864252900000004,12.33],[-61.8671584,12.327],[-61.8699469,12.324],[-61.872645999999996,12.321],[-61.8754727,12.318],[-61.87906749999999,12.314],[-61.8833
,12.309000000000001],[-61.88726319999999,12.304],[-61.88952,12.301],[-61.891690399999995,12.297999999999998],[-61.8937778,12.295],[-61.895785200000006,12.292],[-61.89771530000001,12.289],[-61.899570800000006,12.286],[-61.90251490000001,12.280999999999999],[-61.904753,12.277],[-61.9068719,12.273],[-61.908875900000005,12.269],[-61.911674299999994,12.263],[-61.9134062,12.259],[-61.9150578,12.255],[-61.9179797,12.248999999999999],[-61.920656900000004,12.242999999999999],[-61.92290190000001,12.238999999999999],[-61.925082,12.235],[-61.92666,12.232],[-61.9286637,12.227999999999998],[-61.930556100000004,12.223999999999998],[-61.9332651,12.217999999999998],[-61.936145100000005,12.212],[-61.938782200000006,12.206],[-61.943587599999994,12.193999999999999],[-61.94511500000001,12.19],[-61.9465439,12.186],[-61.9485074,12.18],[-61.95028749999999,12.174],[-61.95186999999999,12.168],[-61.9532519,12.162],[-61.95443739999999,12.156],[-61.954975999999995,12.154],[-61.9570107,12.147999999999998],[-61.9594482,12.139999999999999],[-61.961132600000006,12.133999999999999],[-61.962614,12.127999999999998],[-61.96295200000001,12.126999999999999],[-61.9668105,12.122],[-61.9704259,12.116999999999999],[-61.9738135,12.112],[-61.9769866,12.107],[-61.9799566,12.102],[-61.9827336,12.097],[-61.9853262,12.092],[-61.9882048,12.086],[-61.990875800000005,12.08],[-61.99252880000001,12.076],[-61.994819,12.07],[-61.996888999999996,12.064],[-61.99874590000001,12.058],[-62.000395600000004,12.052000000000001],[-62.0018433,12.046],[-62.0030933,12.04],[-62.003818700000004,12.036],[-62.0047472,12.03],[-62.0052609,12.026],[-62.005875200000006,12.02],[-62.0061812,12.016],[-62.0064861,12.01],[-62.0065868,12.006],[-62.006584499999995,12],[-62.006398100000006,11.994],[-62.0061714,11.99],[-62.0056768,11.984],[-62.0052436,11.98],[-62.004436999999996,11.974],[-62.003794,11.97],[-62.0026693,11.964],[-62.001811399999994,11.96],[-62.0003595,11.954],[-61.999279800000004,11.950000000000001],[-61.9974886,11.943999999999999],[-61.9961776,11.94],[-61.9940313,11.934],[-61.9924772,11.93],[-61.9908218,11.926],[-61.989062399999995,11.922],[-61.9871961,11.918],[-61.984707699999994,11.913],[-61.9825882,11.909],[-61.9803498,11.905000000000001],[-61.9773776,11.9],[-61.9748543,11.896],[-61.972195400000004,11.892000000000001],[-61.9693945,11.888],[-61.9664442,11.884],[-61.9641286,11.881],[-61.9617206,11.878],[-61.959215900000004,11.875000000000002],[-61.9557177,11.871],[-61.9520267,11.867],[-61.9496952,11.864],[-61.94728729999999,11.861],[-61.9430571,11.856000000000002],[-61.93853550000001,11.851],[-61.934690599999996,11.847],[-61.9306255,11.843],[-61.9274208,11.84],[-61.922921800000005,11.836],[-61.9193636,11.833],[-61.9156332,11.83],[-61.911715,11.827],[-61.9075906,11.824],[-61.903238,11.821],[-61.89863020000001,11.818],[-61.8937341,11.815000000000001],[-61.888507499999996,11.812000000000001],[-61.88481339999999,11.81],[-61.8789067,11.807],[-61.87468659999999,11.805000000000001],[-61.870200499999996,11.803],[-61.86540230000001,11.801],[-61.8602301,11.799],[-61.854597299999995,11.796999999999999],[-61.848375600000004,11.795],[-61.84498479999999,11.793999999999999],[-61.8413608,11.793],[-61.8374527,11.792],[-61.8331873,11.790999999999999],[-61.828452500000004,11.79],[-61.8230605,11.789],[-61.81664609999999,11.787999999999998],[-61.808274399999995,11.786999999999999],[-61.790283900000006,11.786],[-61.7840631,11.786],[-61.76607270000001,11.786999999999999],[-61.7573236,11.787999999999998],[-61.73933300000001,11.789],[-61.730961300000004,11.79],[-61.72079310000001,1
1.790999999999999],[-61.70280230000001,11.792],[-61.6944305,11.793],[-61.688016000000005,11.793999999999999],[-61.6826238,11.795],[-61.6732043,11.796999999999999],[-61.667812100000006,11.797999999999998],[-61.663077200000004,11.799],[-61.6588117,11.8],[-61.654903499999996,11.801],[-61.6512793,11.802000000000001],[-61.64788839999999,11.803],[-61.644693499999995,11.804],[-61.63878580000001,11.806000000000001],[-61.636033600000005,11.807],[-61.6308613,11.809000000000001],[-61.62841970000001,11.81],[-61.623784,11.812000000000001],[-61.621576700000006,11.813],[-61.6173564,11.815000000000001],[-61.6133668,11.817],[-61.60957990000001,11.819],[-61.6042318,11.822000000000001],[-61.6008621,11.824],[-61.5976324,11.826],[-61.5930244,11.829],[-61.590096,11.831],[-61.5872727,11.833],[-61.585739600000004,11.834],[-61.5816382,11.836],[-61.5758831,11.839],[-61.5705345,11.842],[-61.565532900000015,11.845],[-61.56375870000001,11.846],[-61.55785109999999,11.849],[-61.552374300000004,11.852000000000002],[-61.5472238,11.855000000000002],[-61.543925300000005,11.857000000000001],[-61.5407605,11.859000000000002],[-61.5362404,11.862000000000002],[-61.533365200000006,11.864],[-61.530591300000005,11.866000000000001],[-61.52791200000001,11.868],[-61.5240577,11.871],[-61.5215904,11.873000000000001],[-61.5180317,11.876000000000001],[-61.515748300000006,11.878],[-61.5124482,11.881],[-61.5103269,11.883000000000001],[-61.5072563,11.886000000000001],[-61.49835930000001,11.895000000000001],[-61.494617399999996,11.899000000000001],[-61.4902146,11.904],[-61.48533390000001,11.909],[-61.4816423,11.913],[-61.47729749999999,11.918],[-61.4732301,11.923],[-61.4694197,11.927999999999999],[-61.464353900000006,11.935],[-61.4615887,11.939],[-61.458328800000004,11.943999999999999],[-61.4552762,11.949],[-61.452420399999994,11.954],[-61.450271099999995,11.958],[-61.4482377,11.962000000000002],[-61.4454269,11.967],[-61.4438039,11.97],[-61.4412339,11.975000000000001],[-61.4388714,11.979000000000001],[-61.436091399999995,11.984],[-61.43451230000001,11.987],[-61.4329978,11.99],[-61.4310762,11.994],[-61.428396400000004,12],[-61.42595080000001,12.006],[-61.423730500000005,12.012],[-61.42313910000001,12.013],[-61.4211047,12.016],[-61.41851320000001,12.02],[-61.4166569,12.023],[-61.41487299999999,12.026],[-61.4131591,12.029],[-61.4109797,12.033],[-61.409422,12.036],[-61.40744449999999,12.04],[-61.405577300000004,12.043999999999999],[-61.4038171,12.047999999999998],[-61.402161,12.052000000000001],[-61.399865999999996,12.058],[-61.39845880000001,12.062000000000001],[-61.3971473,12.066],[-61.3959295,12.07],[-61.394275,12.076],[-61.393596300000006,12.078],[-61.3910564,12.081],[-61.38782199999999,12.085],[-61.3855047,12.088],[-61.382552100000005,12.092],[-61.3804362,12.095],[-61.37774039999999,12.099],[-61.3758089,12.102],[-61.3733493,12.106],[-61.37158839999999,12.109],[-61.369348200000005,12.113],[-61.367746499999996,12.116],[-61.36571180000001,12.12],[-61.3637893,12.123999999999999],[-61.3619754,12.127999999999998],[-61.360267099999994,12.132],[-61.35866139999999,12.136],[-61.35643999999999,12.142],[-61.35508039999999,12.145999999999999],[-61.3538123,12.15],[-61.3520543,12.156],[-61.3509963,12.16],[-61.35002769999999,12.164],[-61.3487397,12.17],[-61.34798939999999,12.174],[-61.34732449999999,12.177999999999999],[-61.3464859,12.184],[-61.346031399999994,12.187999999999999],[-61.345660099999996,12.192],[-61.3452579,12.197999999999999],[-61.3450925,12.202],[-61.3450091,12.206],[-61.345007599999995,12.209999999999999],[-61.345087899999996,12.213999999999
999],[-61.3452502,12.217999999999998],[-61.345494599999995,12.222],[-61.34582149999999,12.225999999999999],[-61.3462313,12.229999999999999],[-61.347002499999995,12.235999999999999],[-61.34762189999999,12.239999999999998],[-61.3483264,12.243999999999998],[-61.3495448,12.25],[-61.345779300000004,12.253],[-61.3421596,12.256],[-61.339838,12.258],[-61.3364839,12.261],[-61.333273999999996,12.264],[-61.330199,12.267],[-61.3272505,12.27],[-61.324421099999995,12.273],[-61.3217043,12.276],[-61.31824699999999,12.28],[-61.3157713,12.283],[-61.3126179,12.286999999999999],[-61.30962449999999,12.290999999999999],[-61.3067826,12.295],[-61.30408469999999,12.299],[-61.301524099999995,12.303],[-61.300435,12.304],[-61.297963700000004,12.306000000000001],[-61.29439910000001,12.309000000000001],[-61.29211200000002,12.311],[-61.2888064,12.314],[-61.286681699999995,12.316],[-61.283606,12.319],[-61.280656900000004,12.322000000000001],[-61.277826999999995,12.325000000000001],[-61.27510960000001,12.328],[-61.272499,12.331],[-61.26999000000001,12.334],[-61.267577800000005,12.337],[-61.26525820000001,12.34],[-61.262302800000015,12.344],[-61.260184900000006,12.347],[-61.25748639999999,12.351],[-61.256502700000006,12.352000000000002],[-61.25251010000001,12.355000000000002],[-61.248711400000005,12.358],[-61.245090100000006,12.361],[-61.2416324,12.364],[-61.23832620000001,12.367],[-61.235161000000005,12.370000000000001],[-61.23212780000001,12.373000000000001],[-61.22827520000001,12.377],[-61.22552040000001,12.38],[-61.22287430000001,12.383000000000001],[-61.220331400000006,12.386000000000001],[-61.2170932,12.39],[-61.2147732,12.393],[-61.2118172,12.397],[-61.209698800000005,12.4],[-61.206999800000006,12.404],[-61.205066,12.407],[-61.20258810000001,12.411],[-61.2008015,12.414],[-61.199085100000005,12.417],[-61.19743690000001,12.42],[-61.19585520000001,12.423],[-61.19433810000001,12.426],[-61.19241330000001,12.43],[-61.1897291,12.436],[-61.1872793,12.442],[-61.18505530000001,12.447999999999999],[-61.1836941,12.452000000000002],[-61.1824277,12.456000000000001],[-61.18125439999999,12.46],[-61.180172500000005,12.464],[-61.1791805,12.468],[-61.178277,12.472000000000001],[-61.1770853,12.478],[-61.17603230000001,12.484],[-61.175387900000004,12.488],[-61.1745797,12.494],[-61.1741456,12.498],[-61.1737945,12.502],[-61.173526,12.506],[-61.17333980000001,12.51],[-61.17323580000001,12.514],[-61.173214300000005,12.516654800000001]]]],"type":"MultiPolygon"},"id":550727,"osm_type":"relation","type":"Feature","name":"Grenada","properties":{"flag":"http://upload.wikimedia.org/wikipedia/commons/b/bc/Flag_of_Grenada.svg","name":"Grenada","name:cs":"Grenada","name:de":"Grenada","name:en":"Grenada","name:eo":"Grenado","name:fr":"Grenade","name:fy":"Grenada","name:hr":"Grenada","name:nl":"Grenada","name:ru":"Гренада","name:sl":"Grenada","name:ta":"கிரெனடா","name:uk":"Гренада","boundary":"administrative","name:tzl":"Grenada","timezone":"America/Grenada","wikidata":"Q769","ISO3166-1":"GD","wikipedia":"en:Grenada","admin_level":"2","is_in:continent":"North America","ISO3166-1:alpha2":"GD","ISO3166-1:alpha3":"GRD","ISO3166-1:numeric":"308"}}],"type":"FeatureCollection","geocoding":{"creation_date":"2016-10-12","generator":{"author":{"name":"Mapzen"},"package":"fences-builder","version":"0.1.2"},"license":"ODbL (see http://www.openstreetmap.org/copyright)"}} Fiona-1.8.21/tests/data/LICENSE.txt000066400000000000000000000003231420023252700165300ustar00rootroot00000000000000The coutwildrnp shapefile and all .txt files are extracts from the US National 
Map's 1:2M scale Wilderness Area boundaries [1] and are in the public domain. [1] http://nationalmap.gov/small_scale/atlasftp.html Fiona-1.8.21/tests/data/collection-pp.txt000066400000000000000000000355011420023252700202240ustar00rootroot00000000000000{ "type": "FeatureCollection", "features": [ { "geometry": { "type": "Polygon", "coordinates": [ [ [ -111.73527526855469, 41.995094299316406 ], [ -111.65931701660156, 41.99627685546875 ], [ -111.6587142944336, 41.9921875 ], [ -111.65888977050781, 41.95676803588867 ], [ -111.67082977294922, 41.91230010986328 ], [ -111.67332458496094, 41.905494689941406 ], [ -111.67088317871094, 41.90049362182617 ], [ -111.66474914550781, 41.893211364746094 ], [ -111.6506576538086, 41.875465393066406 ], [ -111.64759826660156, 41.87091827392578 ], [ -111.64640808105469, 41.86273956298828 ], [ -111.64334869384766, 41.858192443847656 ], [ -111.63720703125, 41.85499572753906 ], [ -111.633544921875, 41.847267150878906 ], [ -111.63053894042969, 41.83409118652344 ], [ -111.6330337524414, 41.82728576660156 ], [ -111.63983154296875, 41.8227653503418 ], [ -111.6484603881836, 41.82188034057617 ], [ -111.66077423095703, 41.82327651977539 ], [ -111.6712417602539, 41.82330322265625 ], [ -111.67618560791016, 41.82013702392578 ], [ -111.68803405761719, 41.78792953491211 ], [ -111.69361114501953, 41.77931594848633 ], [ -111.70162200927734, 41.77797317504883 ], [ -111.70901489257812, 41.77663040161133 ], [ -111.71395111083984, 41.772098541259766 ], [ -111.71891784667969, 41.763031005859375 ], [ -111.72816467285156, 41.75851058959961 ], [ -111.74726104736328, 41.75537109375 ], [ -111.75650024414062, 41.752662658691406 ], [ -111.77067565917969, 41.7445182800293 ], [ -111.77064514160156, 41.75495910644531 ], [ -111.75585174560547, 41.76219940185547 ], [ -111.7330551147461, 41.766693115234375 ], [ -111.72749328613281, 41.77212905883789 ], [ -111.71883392333984, 41.7834587097168 ], [ -111.71080780029297, 41.78889083862305 ], [ -111.70340728759766, 41.79250717163086 ], [ -111.70030212402344, 41.798404693603516 ], [ -111.70210266113281, 41.8088493347168 ], [ -111.70760345458984, 41.819759368896484 ], [ -111.71312713623047, 41.82340621948242 ], [ -111.71929168701172, 41.82341766357422 ], [ -111.72545623779297, 41.8225212097168 ], [ -111.7341537475586, 41.803016662597656 ], [ -111.740966796875, 41.79213333129883 ], [ -111.74531555175781, 41.78215408325195 ], [ -111.77122497558594, 41.7658576965332 ], [ -111.77056884765625, 41.77811813354492 ], [ -111.7662582397461, 41.778106689453125 ], [ -111.76746368408203, 41.78628158569336 ], [ -111.76253509521484, 41.78627395629883 ], [ -111.76241302490234, 41.82259750366211 ], [ -111.77104187011719, 41.8221549987793 ], [ -111.77161407470703, 41.83351135253906 ], [ -111.7333755493164, 41.84524154663086 ], [ -111.73274993896484, 41.847511291503906 ], [ -111.7376708984375, 41.84979248046875 ], [ -111.77157592773438, 41.845767974853516 ], [ -111.77215576171875, 41.85802459716797 ], [ -111.75243377685547, 41.85844802856445 ], [ -111.72467803955078, 41.86384201049805 ], [ -111.71109771728516, 41.868804931640625 ], [ -111.70182037353516, 41.87604904174805 ], [ -111.69624328613281, 41.88193893432617 ], [ -111.69497680664062, 41.88874816894531 ], [ -111.70053100585938, 41.89057540893555 ], [ -111.70793151855469, 41.88923263549805 ], [ -111.72091674804688, 41.87972640991211 ], [ -111.73388671875, 41.87384796142578 ], [ -111.75301361083984, 41.86888885498047 ], [ -111.75350952148438, 41.90249252319336 ], [ -111.74364471435547, 41.90247344970703 ], [ 
-111.74463653564453, 41.967864990234375 ], [ -111.7119369506836, 41.96416473388672 ], [ -111.69283294677734, 41.95912551879883 ], [ -111.68911743164062, 41.96047592163086 ], [ -111.6891098022461, 41.96320343017578 ], [ -111.69341278076172, 41.96684646606445 ], [ -111.70449829101562, 41.972320556640625 ], [ -111.7341079711914, 41.97828674316406 ], [ -111.73527526855469, 41.995094299316406 ] ] ] }, "type": "Feature", "id": "0", "properties": { "PERIMETER": 1.22107, "FEATURE2": null, "NAME": "Mount Naomi Wilderness", "FEATURE1": "Wilderness", "URL": "http://www.wilderness.net/index.cfm?fuse=NWPS&sec=wildView&wname=Mount%20Naomi", "AGBUR": "FS", "AREA": 0.0179264, "STATE_FIPS": "49", "WILDRNP020": 332, "STATE": "UT" } }, { "geometry": { "type": "Polygon", "coordinates": [ [ [ -112.00384521484375, 41.552703857421875 ], [ -112.00446319580078, 41.56586456298828 ], [ -112.0112075805664, 41.56586456298828 ], [ -112.01121520996094, 41.57902526855469 ], [ -112.01734924316406, 41.57902526855469 ], [ -112.0173568725586, 41.594459533691406 ], [ -112.02779388427734, 41.5940055847168 ], [ -112.02779388427734, 41.60171890258789 ], [ -112.03945922851562, 41.60126495361328 ], [ -112.04007720947266, 41.608524322509766 ], [ -112.04744720458984, 41.608524322509766 ], [ -112.0474624633789, 41.62804412841797 ], [ -112.05974578857422, 41.62758255004883 ], [ -112.05975341796875, 41.640296936035156 ], [ -112.050537109375, 41.64030075073242 ], [ -112.05054473876953, 41.64983367919922 ], [ -112.04132843017578, 41.64983367919922 ], [ -112.04195404052734, 41.66299819946289 ], [ -112.05793762207031, 41.662540435791016 ], [ -112.0579605102539, 41.692047119140625 ], [ -112.07394409179688, 41.692039489746094 ], [ -112.07459259033203, 41.72381591796875 ], [ -112.06167602539062, 41.72382354736328 ], [ -112.0616683959961, 41.71383285522461 ], [ -112.05490112304688, 41.713836669921875 ], [ -112.04137420654297, 41.71384048461914 ], [ -112.04138946533203, 41.7379035949707 ], [ -112.0376968383789, 41.74108123779297 ], [ -112.03339385986328, 41.741085052490234 ], [ -112.02908325195312, 41.729736328125 ], [ -112.02599334716797, 41.71657180786133 ], [ -112.0241470336914, 41.71157455444336 ], [ -112.0272216796875, 41.704769134521484 ], [ -112.02413940429688, 41.70068359375 ], [ -112.01676177978516, 41.69977951049805 ], [ -112.01615142822266, 41.7070426940918 ], [ -112.00508117675781, 41.707496643066406 ], [ -112.00508117675781, 41.66618347167969 ], [ -111.9792709350586, 41.6666374206543 ], [ -111.9786605834961, 41.653926849365234 ], [ -111.96821594238281, 41.65346908569336 ], [ -111.96760559082031, 41.6407585144043 ], [ -111.96146392822266, 41.6407585144043 ], [ -111.96025085449219, 41.61125183105469 ], [ -111.95042419433594, 41.61124801635742 ], [ -111.94796752929688, 41.60988235473633 ], [ -111.94735717773438, 41.60761260986328 ], [ -111.9522705078125, 41.60443878173828 ], [ -111.96455383300781, 41.60262680053711 ], [ -111.9682388305664, 41.60398864746094 ], [ -111.9725341796875, 41.60807418823242 ], [ -111.97560119628906, 41.60943603515625 ], [ -111.97928619384766, 41.61034393310547 ], [ -111.98542785644531, 41.609439849853516 ], [ -111.98481750488281, 41.58356475830078 ], [ -111.97868347167969, 41.58356857299805 ], [ -111.97745513916016, 41.570404052734375 ], [ -111.97132110595703, 41.57085418701172 ], [ -111.97132110595703, 41.56450271606445 ], [ -111.98297882080078, 41.564048767089844 ], [ -111.98175811767578, 41.54090118408203 ], [ -111.98176574707031, 41.53545379638672 ], [ -112.00323486328125, 41.53545379638672 ], [ 
-112.00384521484375, 41.552703857421875 ] ] ] }, "type": "Feature", "id": "1", "properties": { "PERIMETER": 0.755827, "FEATURE2": null, "NAME": "Wellsville Mountain Wilderness", "FEATURE1": "Wilderness", "URL": "http://www.wilderness.net/index.cfm?fuse=NWPS&sec=wildView&wname=Wellsville%20Mountain", "AGBUR": "FS", "AREA": 0.0104441, "STATE_FIPS": "49", "WILDRNP020": 336, "STATE": "UT" } } ] } Fiona-1.8.21/tests/data/collection.txt000066400000000000000000000154051420023252700176100ustar00rootroot00000000000000{"type": "FeatureCollection", "features": [{"geometry": {"type": "Polygon", "coordinates": [[[-111.73527526855469, 41.995094299316406], [-111.65931701660156, 41.99627685546875], [-111.6587142944336, 41.9921875], [-111.65888977050781, 41.95676803588867], [-111.67082977294922, 41.91230010986328], [-111.67332458496094, 41.905494689941406], [-111.67088317871094, 41.90049362182617], [-111.66474914550781, 41.893211364746094], [-111.6506576538086, 41.875465393066406], [-111.64759826660156, 41.87091827392578], [-111.64640808105469, 41.86273956298828], [-111.64334869384766, 41.858192443847656], [-111.63720703125, 41.85499572753906], [-111.633544921875, 41.847267150878906], [-111.63053894042969, 41.83409118652344], [-111.6330337524414, 41.82728576660156], [-111.63983154296875, 41.8227653503418], [-111.6484603881836, 41.82188034057617], [-111.66077423095703, 41.82327651977539], [-111.6712417602539, 41.82330322265625], [-111.67618560791016, 41.82013702392578], [-111.68803405761719, 41.78792953491211], [-111.69361114501953, 41.77931594848633], [-111.70162200927734, 41.77797317504883], [-111.70901489257812, 41.77663040161133], [-111.71395111083984, 41.772098541259766], [-111.71891784667969, 41.763031005859375], [-111.72816467285156, 41.75851058959961], [-111.74726104736328, 41.75537109375], [-111.75650024414062, 41.752662658691406], [-111.77067565917969, 41.7445182800293], [-111.77064514160156, 41.75495910644531], [-111.75585174560547, 41.76219940185547], [-111.7330551147461, 41.766693115234375], [-111.72749328613281, 41.77212905883789], [-111.71883392333984, 41.7834587097168], [-111.71080780029297, 41.78889083862305], [-111.70340728759766, 41.79250717163086], [-111.70030212402344, 41.798404693603516], [-111.70210266113281, 41.8088493347168], [-111.70760345458984, 41.819759368896484], [-111.71312713623047, 41.82340621948242], [-111.71929168701172, 41.82341766357422], [-111.72545623779297, 41.8225212097168], [-111.7341537475586, 41.803016662597656], [-111.740966796875, 41.79213333129883], [-111.74531555175781, 41.78215408325195], [-111.77122497558594, 41.7658576965332], [-111.77056884765625, 41.77811813354492], [-111.7662582397461, 41.778106689453125], [-111.76746368408203, 41.78628158569336], [-111.76253509521484, 41.78627395629883], [-111.76241302490234, 41.82259750366211], [-111.77104187011719, 41.8221549987793], [-111.77161407470703, 41.83351135253906], [-111.7333755493164, 41.84524154663086], [-111.73274993896484, 41.847511291503906], [-111.7376708984375, 41.84979248046875], [-111.77157592773438, 41.845767974853516], [-111.77215576171875, 41.85802459716797], [-111.75243377685547, 41.85844802856445], [-111.72467803955078, 41.86384201049805], [-111.71109771728516, 41.868804931640625], [-111.70182037353516, 41.87604904174805], [-111.69624328613281, 41.88193893432617], [-111.69497680664062, 41.88874816894531], [-111.70053100585938, 41.89057540893555], [-111.70793151855469, 41.88923263549805], [-111.72091674804688, 41.87972640991211], [-111.73388671875, 41.87384796142578], [-111.75301361083984, 
41.86888885498047], [-111.75350952148438, 41.90249252319336], [-111.74364471435547, 41.90247344970703], [-111.74463653564453, 41.967864990234375], [-111.7119369506836, 41.96416473388672], [-111.69283294677734, 41.95912551879883], [-111.68911743164062, 41.96047592163086], [-111.6891098022461, 41.96320343017578], [-111.69341278076172, 41.96684646606445], [-111.70449829101562, 41.972320556640625], [-111.7341079711914, 41.97828674316406], [-111.73527526855469, 41.995094299316406]]]}, "type": "Feature", "id": "0", "properties": {"PERIMETER": 1.22107, "FEATURE2": null, "NAME": "Mount Naomi Wilderness", "FEATURE1": "Wilderness", "URL": "http://www.wilderness.net/index.cfm?fuse=NWPS&sec=wildView&wname=Mount%20Naomi", "AGBUR": "FS", "AREA": 0.0179264, "STATE_FIPS": "49", "WILDRNP020": 332, "STATE": "UT"}}, {"geometry": {"type": "Polygon", "coordinates": [[[-112.00384521484375, 41.552703857421875], [-112.00446319580078, 41.56586456298828], [-112.0112075805664, 41.56586456298828], [-112.01121520996094, 41.57902526855469], [-112.01734924316406, 41.57902526855469], [-112.0173568725586, 41.594459533691406], [-112.02779388427734, 41.5940055847168], [-112.02779388427734, 41.60171890258789], [-112.03945922851562, 41.60126495361328], [-112.04007720947266, 41.608524322509766], [-112.04744720458984, 41.608524322509766], [-112.0474624633789, 41.62804412841797], [-112.05974578857422, 41.62758255004883], [-112.05975341796875, 41.640296936035156], [-112.050537109375, 41.64030075073242], [-112.05054473876953, 41.64983367919922], [-112.04132843017578, 41.64983367919922], [-112.04195404052734, 41.66299819946289], [-112.05793762207031, 41.662540435791016], [-112.0579605102539, 41.692047119140625], [-112.07394409179688, 41.692039489746094], [-112.07459259033203, 41.72381591796875], [-112.06167602539062, 41.72382354736328], [-112.0616683959961, 41.71383285522461], [-112.05490112304688, 41.713836669921875], [-112.04137420654297, 41.71384048461914], [-112.04138946533203, 41.7379035949707], [-112.0376968383789, 41.74108123779297], [-112.03339385986328, 41.741085052490234], [-112.02908325195312, 41.729736328125], [-112.02599334716797, 41.71657180786133], [-112.0241470336914, 41.71157455444336], [-112.0272216796875, 41.704769134521484], [-112.02413940429688, 41.70068359375], [-112.01676177978516, 41.69977951049805], [-112.01615142822266, 41.7070426940918], [-112.00508117675781, 41.707496643066406], [-112.00508117675781, 41.66618347167969], [-111.9792709350586, 41.6666374206543], [-111.9786605834961, 41.653926849365234], [-111.96821594238281, 41.65346908569336], [-111.96760559082031, 41.6407585144043], [-111.96146392822266, 41.6407585144043], [-111.96025085449219, 41.61125183105469], [-111.95042419433594, 41.61124801635742], [-111.94796752929688, 41.60988235473633], [-111.94735717773438, 41.60761260986328], [-111.9522705078125, 41.60443878173828], [-111.96455383300781, 41.60262680053711], [-111.9682388305664, 41.60398864746094], [-111.9725341796875, 41.60807418823242], [-111.97560119628906, 41.60943603515625], [-111.97928619384766, 41.61034393310547], [-111.98542785644531, 41.609439849853516], [-111.98481750488281, 41.58356475830078], [-111.97868347167969, 41.58356857299805], [-111.97745513916016, 41.570404052734375], [-111.97132110595703, 41.57085418701172], [-111.97132110595703, 41.56450271606445], [-111.98297882080078, 41.564048767089844], [-111.98175811767578, 41.54090118408203], [-111.98176574707031, 41.53545379638672], [-112.00323486328125, 41.53545379638672], [-112.00384521484375, 41.552703857421875]]]}, "type": 
"Feature", "id": "1", "properties": {"PERIMETER": 0.755827, "FEATURE2": null, "NAME": "Wellsville Mountain Wilderness", "FEATURE1": "Wilderness", "URL": "http://www.wilderness.net/index.cfm?fuse=NWPS&sec=wildView&wname=Wellsville%20Mountain", "AGBUR": "FS", "AREA": 0.0104441, "STATE_FIPS": "49", "WILDRNP020": 336, "STATE": "UT"}}]}Fiona-1.8.21/tests/data/coutwildrnp.cpg000066400000000000000000000000131420023252700177460ustar00rootroot00000000000000ISO-8859-1 Fiona-1.8.21/tests/data/coutwildrnp.dbf000066400000000000000000001243411420023252700177430ustar00rootroot00000000000000_CaPERIMETERNFEATURE2CPNAMECPFEATURE1CPURLCeAGBURCPAREANSTATE_FIPSCPWILDRNP020N STATECP 1.221070000000000 Mount Naomi Wilderness Wilderness http://www.wilderness.net/index.cfm?fuse=NWPS&sec=wildView&wname=Mount%20Naomi FS 0.01792640000000049 332UT 0.755827000000000 Wellsville Mountain Wilderness Wilderness http://www.wilderness.net/index.cfm?fuse=NWPS&sec=wildView&wname=Wellsville%20Mountain FS 0.01044410000000049 336UT 1.708510000000000 Mount Zirkel Wilderness Wilderness http://www.wilderness.net/index.cfm?fuse=NWPS&sec=wildView&wname=Mount%20Zirkel FS 0.07149550000000008 357CO 2.232410000000000 High Uintas Wilderness Wilderness http://www.wilderness.net/index.cfm?fuse=NWPS&sec=wildView&wname=High%20Uintas FS 0.18291900000000049 358UT 1.054580000000000 Rawah Wilderness Wilderness http://www.wilderness.net/index.cfm?fuse=NWPS&sec=wildView&wname=Rawah FS 0.03373710000000008 359CO 0.418340000000000 Mount Olympus Wilderness Wilderness http://www.wilderness.net/index.cfm?fuse=NWPS&sec=wildView&wname=Mount%20Olympus FS 0.00633137000000049 364UT 1.760390000000000 Comanche Peak Wilderness Wilderness http://www.wilderness.net/index.cfm?fuse=NWPS&sec=wildView&wname=Comanche%20Peak FS 0.03197700000000008 365CO 0.462863000000000 Cache La Poudre Wilderness Wilderness http://www.wilderness.net/index.cfm?fuse=NWPS&sec=wildView&wname=Cache%20La%20Poudre FS 0.00481977000000008 366CO 0.315219000000000 Twin Peaks Wilderness Wilderness http://www.wilderness.net/index.cfm?fuse=NWPS&sec=wildView&wname=Twin%20Peaks FS 0.00477962000000049 367UT 0.329520000000000 Neota Wilderness Wilderness http://www.wilderness.net/index.cfm?fuse=NWPS&sec=wildView&wname=Neota FS 0.00576742000000008 369CO 0.518395000000000 Lone Peak Wilderness Wilderness http://www.wilderness.net/index.cfm?fuse=NWPS&sec=wildView&wname=Lone%20Peak FS 0.01251300000000049 371UT 0.477348000000000 Deseret Peak Wilderness Wilderness http://www.wilderness.net/index.cfm?fuse=NWPS&sec=wildView&wname=Deseret%20Peak FS 0.01077180000000049 373UT 0.675146000000000 Never Summer Wilderness Wilderness http://www.wilderness.net/index.cfm?fuse=NWPS&sec=wildView&wname=Never%20Summer FS 0.00908863000000008 374CO 0.288683000000000 Mount Timpanogos Wilderness Wilderness http://www.wilderness.net/index.cfm?fuse=NWPS&sec=wildView&wname=Mount%20Timpanogos FS 0.00442921000000049 375UT 0.768802000000000 Sarvis Creek Wilderness Wilderness http://www.wilderness.net/index.cfm?fuse=NWPS&sec=wildView&wname=Sarvis%20Creek FS 0.01957160000000008 376CO 1.372940000000000 Indian Peaks Wilderness Wilderness http://www.wilderness.net/index.cfm?fuse=NWPS&sec=wildView&wname=Indian%20Peaks FS 0.03140190000000008 378CO 2.029470000000000 Flat Tops Wilderness Wilderness http://www.wilderness.net/index.cfm?fuse=NWPS&sec=wildView&wname=Flat%20Tops FS 0.10322100000000008 380CO 0.765491000000000 James Peak Wilderness Wilderness http://www.wilderness.net/index.cfm?fuse=NWPS&sec=wildView&wname=James%20Peak FS 
0.00676706000000008 384CO 0.726088000000000 Mount Nebo Wilderness Wilderness http://www.wilderness.net/index.cfm?fuse=NWPS&sec=wildView&wname=Mount%20Nebo FS 0.01203290000000049 385UT 0.376165000000000 Byers Peak Wilderness Wilderness http://www.wilderness.net/index.cfm?fuse=NWPS&sec=wildView&wname=Byers%20Peak FS 0.00345459000000008 386CO 0.528667000000000 Vasquez Peak Wilderness Wilderness http://www.wilderness.net/index.cfm?fuse=NWPS&sec=wildView&wname=Vasquez%20Peak FS 0.00542539000000008 387CO 1.257970000000000 Eagles Nest Wilderness Wilderness http://www.wilderness.net/index.cfm?fuse=NWPS&sec=wildView&wname=Eagles%20Nest FS 0.05923840000000008 388CO 0.522076000000000 Ptarmigan Peak Wilderness Wilderness http://www.wilderness.net/index.cfm?fuse=NWPS&sec=wildView&wname=Ptarmigan%20Peak FS 0.00574553000000008 389CO 1.078160000000000 Mount Evans Wilderness Wilderness http://www.wilderness.net/index.cfm?fuse=NWPS&sec=wildView&wname=Mount%20Evans FS 0.03254480000000008 390CO 1.438710000000000 Holy Cross Wilderness Wilderness http://www.wilderness.net/index.cfm?fuse=NWPS&sec=wildView&wname=Holy%20Cross FS 0.05451810000000008 391CO 1.463510000000000 Lost Creek Wilderness Wilderness http://www.wilderness.net/index.cfm?fuse=NWPS&sec=wildView&wname=Lost%20Creek FS 0.04967140000000008 396CO 1.063300000000000 Hunter-Fryingpan Wilderness Wilderness http://www.wilderness.net/index.cfm?fuse=NWPS&sec=wildView&wname=Hunter%2DFryingpan FS 0.03224480000000008 398CO 1.458040000000000 Maroon Bells-Snowmass Wilderness Wilderness http://www.wilderness.net/index.cfm?fuse=NWPS&sec=wildView&wname=Maroon%20Bells%2DSnowmass FS 0.07808400000000008 399CO 0.738527000000000 Mount Massive Wilderness Wilderness http://www.wilderness.net/index.cfm?fuse=NWPS&sec=wildView&wname=Mount%20Massive FS 0.01047520000000008 400CO 0.193332000000000 Mount Massive Wilderness Wilderness http://www.wilderness.net/index.cfm?fuse=NWPS&sec=wildView&wname=Mount%20Massive FWS 0.00093778200000008 401CO 0.820306000000000 Buffalo Peaks Wilderness Wilderness http://www.wilderness.net/index.cfm?fuse=NWPS&sec=wildView&wname=Buffalo%20Peaks FS 0.01711430000000008 404CO 2.025460000000000 Collegiate Peaks Wilderness Wilderness http://www.wilderness.net/index.cfm?fuse=NWPS&sec=wildView&wname=Collegiate%20Peaks FS 0.07376710000000008 405CO 0.907013000000000 Raggeds Wilderness Wilderness http://www.wilderness.net/index.cfm?fuse=NWPS&sec=wildView&wname=Raggeds FS 0.02990640000000008 406CO 1.644000000000000 West Elk Wilderness Wilderness http://www.wilderness.net/index.cfm?fuse=NWPS&sec=wildView&wname=West%20Elk FS 0.07623760000000008 415CO 0.538332000000000 Fossil Ridge Wilderness Wilderness http://www.wilderness.net/index.cfm?fuse=NWPS&sec=wildView&wname=Fossil%20Ridge FS 0.01317560000000008 419CO 0.826888000000000 Gunnison Gorge Wilderness Wilderness http://www.wilderness.net/index.cfm?fuse=NWPS&sec=wildView&wname=Gunnison%20Gorge BLM 0.00739996000000008 420CO 0.707377000000000 Black Canyon of the Gunnison Wilderness Wilderness http://www.wilderness.net/index.cfm?fuse=NWPS&sec=wildView&wname=Black%20Canyon%20of%20the%20GunnisonNPS 0.00663470000000008 425CO 0.735176000000000 Sangre de Cristo Wilderness Wilderness http://www.wilderness.net/index.cfm?fuse=NWPS&sec=wildView&wname=Sangre%20de%20Cristo FS 0.01336290000000008 427CO 0.393427000000000 Sangre de Cristo Wilderness Wilderness http://www.wilderness.net/index.cfm?fuse=NWPS&sec=wildView&wname=Sangre%20de%20Cristo FS 0.00685532000000008 433CO 0.829067000000000 Powderhorn Wilderness Wilderness 
[remainder of tests/data/coutwildrnp.dbf: binary DBF attribute table of Colorado/Utah wilderness-area records (names such as Powderhorn, Sangre de Cristo, Uncompahgre, La Garita, Mount Sneffels, Weminuche, Box-Death Hollow, Greenhorn Mountain, Lizard Head, Dark Canyon, Great Sand Dunes, Ashdown Gorge, Pine Valley Mountain, South San Juan, Mesa Verde, with agency codes FS/BLM/NPS, wilderness.net URLs, and area/perimeter values); raw bytes omitted]
Fiona-1.8.21/tests/data/coutwildrnp.prj
GEOGCS["GCS_WGS_1984",DATUM["D_WGS_1984",SPHEROID["WGS_1984",6378137,298.257223563]],PRIMEM["Greenwich",0],UNIT["Degree",0.017453292519943295]]
Fiona-1.8.21/tests/data/coutwildrnp.shp
[binary shapefile geometry; raw bytes omitted]
Fiona-1.8.21/tests/data/coutwildrnp.shx
[binary shapefile index; raw bytes omitted]
Fiona-1.8.21/tests/data/curves_line.csv
WKT,SHAPE_Length
"MULTILINESTRING ((-1.02439 48.4878,2.471545 48.45528))",3.49608621305261
"MULTICURVE (COMPOUNDCURVE ((-0.9105691 47.21951,1.414634 47.17073),CIRCULARSTRING (1.414634 47.17073,2.423818 47.48377,1.407531 46.72668),(1.407531 46.72668,-0.9243407 46.72668)))",8.39459167219456
"MULTICURVE (COMPOUNDCURVE (CIRCULARSTRING (-0.3902439 46.42109,0.2422325 45.78862,-0.3902439 45.15614,-1.02272 45.78862,-0.3902439 46.42109)))",3.97396663612273
"MULTILINESTRING ((2.404137 38.88428,2.475991 38.93491,2.54878
38.98351,2.622149 39.02986,2.69574 39.07372,2.769195 39.11488,2.842157 39.15314,2.914269 39.18832,2.98518 39.22023,3.054546 39.24874,3.122027 39.2737,3.187295 39.29498,3.250033 39.31248,3.309934 39.32613,3.366707 39.33584,3.420075 39.34158,3.469778 39.34331,3.515575 39.34104,3.557241 39.33476,3.594574 39.32451,3.627391 39.31034,3.655534 39.29232,3.678865 39.27053,3.697271 39.24509,3.710661 39.21612,3.71897 39.18375,3.722158 39.14814,3.72021 39.10948,3.713135 39.06794,3.700967 39.02373,3.683765 38.97707,3.661614 38.92818,3.634622 38.87729,3.602919 38.82467,3.566661 38.77056,3.526023 38.71523,3.481205 38.65895,3.432424 38.60199,3.379918 38.54462,3.323942 38.48714,3.26477 38.42982,3.20269 38.37293,3.138004 38.31676,3.071028 38.26158,3.002087 38.20766,2.931517 38.15525,2.859663 38.10463,2.786874 38.05602,2.713506 38.00968,2.639914 37.96582,2.566459 37.92466,2.493498 37.88639,2.421386 37.85122,2.350474 37.8193,2.281109 37.79079,2.213628 37.76584,2.148359 37.74456,2.085621 37.72705,2.02572 37.71341,1.968947 37.70369,1.915579 37.69795,1.865876 37.69622,1.82008 37.6985,1.778414 37.70477,1.741081 37.71502,1.708263 37.72919,1.68012 37.74721,1.656789 37.769,1.638384 37.79444,1.624994 37.82342,1.616684 37.85579,1.613496 37.89139,1.615444 37.93006,1.62252 37.97159,1.634688 38.0158,1.651889 38.06247,1.67404 38.11136,1.701033 38.16224,1.732735 38.21486,1.768994 38.26897,1.809631 38.3243,1.854449 38.38059,1.90323 38.43755,1.955737 38.49491,2.011712 38.55239,2.070884 38.60972,2.132964 38.6666,2.19765 38.72277,2.264627 38.77795,2.333568 38.83188,2.404137 38.88428))",5.67762431364471 "MULTILINESTRING ((-0.6666667 44.03252,-0.6056813 44.10943,-0.5428571 44.18257,-0.4782797 44.25197,-0.4120346 44.31763,-0.3442073 44.37959,-0.2748833 44.43785,-0.2041482 44.49243,-0.1320875 44.54336,-0.0587867 44.59064,0.0156686 44.63431,0.091193 44.67437,0.167701 44.71084,0.2451069 44.74375,0.3233253 44.77311,0.4022706 44.79894,0.4818574 44.82125,0.5620001 44.84007,0.6426132 44.85542,0.7236111 44.8673,0.8049083 44.87574,0.8864194 44.88076,0.9680587 44.88238,1.049741 44.88061,1.13138 44.87547,1.212891 44.86698,1.294188 44.85516,1.375186 44.84003,1.455799 44.8216,1.535942 44.79989,1.615529 44.77492,1.694474 44.74671,1.772693 44.71528,1.850099 44.68064,1.926607 44.64281,2.002131 44.60182,2.076586 44.55767,2.149887 44.51039,2.221948 44.46,2.292683 44.4065))",3.34511332340398 "MULTICURVE (COMPOUNDCURVE ((-1.300813 42.89431,0.3902439 43.31707),CIRCULARSTRING (0.3902439 43.31707,1.4163 43.74383,2.455285 43.34959),(2.455285 43.34959,2.455121 43.34941,2.454636 43.34885,2.453842 43.34794,2.452751 43.34666,2.451373 43.34503,2.44972 43.34305,2.447803 43.34073,2.445634 43.33807,2.443223 43.33507,2.440583 43.33175,2.437724 43.3281,2.434658 43.32413,2.431396 43.31985,2.42795 43.31526,2.42433 43.31037,2.420549 43.30517,2.416617 43.29968,2.412546 43.2939,2.408347 43.28784,2.404032 43.28149,2.399611 43.27487,2.395096 43.26799,2.390499 43.26083,2.385831 43.25342,2.381103 43.24575,2.376327 43.23783,2.371513 43.22966,2.366673 43.22126,2.361819 43.21262,2.356962 43.20374,2.352112 43.19465,2.347283 43.18533,2.342484 43.17579,2.337727 43.16605,2.333023 43.1561,2.328385 43.14594,2.323822 43.13559,2.319347 43.12505,2.314971 43.11433,2.310704 43.10342,2.30656 43.09233,2.302547 43.08108,2.29868 43.06965,2.294967 43.05807,2.291421 43.04633,2.288054 43.03443,2.284875 43.02239,2.281898 43.01021,2.279132 42.99789,2.27659 42.98543,2.274283 42.97285,2.272222 42.96015,2.270418 42.94733,2.268883 42.9344,2.267628 42.92136,2.266664 42.90821,2.266003 
42.89497,2.265657 42.88164,2.265635 42.86821,2.265951 42.85471,2.266614 42.84112,2.267637 42.82747,2.26903 42.81374,2.270806 42.79995,2.272975 42.7861,2.275548 42.7722,2.278538 42.75825,2.281955 42.74426,2.28581 42.73022,2.290116 42.71616,2.294883 42.70206,2.300123 42.68794,2.305847 42.6738,2.312066 42.65965,2.318792 42.64548,2.326036 42.63132,2.333809 42.61715,2.342123 42.60299,2.350989 42.58883,2.360418 42.5747,2.370422 42.56058,2.381012 42.54649,2.392199 42.53243,2.403995 42.5184,2.416411 42.50442,2.429458 42.49047,2.443148 42.47658,2.457492 42.46274,2.472501 42.44896,2.488187 42.43525,2.504561 42.4216,2.521634 42.40803,2.539418 42.39454,2.557924 42.38113,2.577163 42.36781,2.597146 42.35459,2.617886 42.34146),(2.617886 42.34146,2.636783 42.32997,2.656209 42.31853,2.676146 42.30716,2.696577 42.29584,2.717483 42.28458,2.738844 42.27338,2.760644 42.26223,2.782863 42.25113,2.805484 42.24007,2.828486 42.22907,2.851854 42.21811,2.875567 42.2072,2.899607 42.19633,2.923957 42.18549,2.948598 42.1747,2.97351 42.16394,2.998677 42.15321,3.024079 42.14252,3.049698 42.13186,3.075516 42.12122,3.101515 42.11062,3.127675 42.10003,3.153979 42.08947,3.180407 42.07893,3.206943 42.06841,3.233567 42.0579,3.260261 42.04741,3.287006 42.03694,3.313785 42.02647,3.340578 42.01602,3.367367 42.00557,3.394135 41.99512,3.420862 41.98468,3.44753 41.97424,3.474121 41.96381,3.500616 41.95336,3.526997 41.94292,3.553246 41.93247,3.579344 41.92201,3.605273 41.91154,3.631014 41.90106,3.656549 41.89057,3.681859 41.88006,3.706927 41.86953,3.731733 41.85898,3.75626 41.84841,3.780489 41.83782,3.804401 41.82721,3.827979 41.81656,3.851203 41.80589,3.874056 41.79519,3.896518 41.78445,3.918573 41.77368,3.9402 41.76288,3.961382 41.75203,3.982101 41.74115,4.002337 41.73022,4.022073 41.71925,4.041291 41.70823,4.059971 41.69717,4.078095 41.68605,4.095646 41.67488,4.112604 41.66366,4.128951 41.65239,4.144669 41.64105,4.15974 41.62966,4.174144 41.6182,4.187864 41.60669,4.200882 41.5951,4.213178 41.58345,4.224734 41.57174,4.235533 41.55995,4.245555 41.54808,4.254783 41.53615,4.263197 41.52413,4.27078 41.51204,4.277512 41.49987,4.283376 41.48762,4.288354 41.47528,4.292426 41.46285,4.295575 41.45034,4.297782 41.43774,4.299028 41.42504,4.299296 41.41226,4.298567 41.39937,4.296822 41.38639,4.294043 41.37331,4.290211 41.36013,4.285309 41.34685,4.279318 41.33346,4.272219 41.31996,4.263995 41.30635,4.254626 41.29264,4.244094 41.27881,4.232381 41.26486,4.219468 41.2508,4.205338 41.23662,4.189971 41.22232,4.17335 41.2079,4.155455 41.19336,4.136269 41.17868,4.115773 41.16388,4.093948 41.14896,4.070777 41.13389,4.046241 41.1187,4.020321 41.10337,3.993 41.0879,3.964258 41.07229,3.934077 41.05654,3.902439 41.04065),CIRCULARSTRING (3.902439 41.04065,1.775383 40.65987,0.3414634 42.27642)))",12.2623236074563 "MULTILINESTRING ((-0.2762998 38.32375,-0.2637102 38.43947,-0.2447018 38.55117,-0.2193601 38.65833,-0.1877989 38.76047,-0.1501601 38.85714,-0.1066131 38.94789,-0.0573536 39.03233,-0.002603 39.11007,0.0573925 39.18076,0.1223631 39.24409,0.1920168 39.29976,0.2660403 39.34754,0.3441008 39.3872))",1.29261161044762 "MULTICURVE (COMPOUNDCURVE (CIRCULARSTRING (-1.389372 40.02584,-1.109435 40.65503,-0.4250745 40.73184),CIRCULARSTRING (-0.4250745 40.73184,-0.2233581 40.09231,0.4014657 40.33579)),COMPOUNDCURVE (CIRCULARSTRING (0.9008338 40.26691,1.138662 40.45594,1.434641 40.38745)))",3.57349361227513 "MULTILINESTRING ((1.383736 39.35035,1.012627 38.5647,0.5434618 37.97689,-0.0220862 37.58902))",2.3133339931156 
Fiona-1.8.21/tests/data/gre.cpg
UTF-8
Fiona-1.8.21/tests/data/gre.dbf
[binary DBF with one record of country attributes for Grenada: flag=http://upload.wikimedia.org/wikipedia/commons/b/bc/Flag_of_Grenada.svg, name=Grenada plus localized names (e.g. name_ru=Гренада, name_ta=கிரெனடா), boundary=administrative, name_tzl=Grenada, timezone=America/Grenada, wikidata=Q769, wikipedia=en:Grenada, ISO3166-1=GD/GRD/308, admin_level=2, continent=North America; raw bytes omitted]
Fiona-1.8.21/tests/data/gre.prj
GEOGCS["GCS_WGS_1984",DATUM["D_WGS_1984",SPHEROID["WGS_1984",6378137,298.257223563]],PRIMEM["Greenwich",0],UNIT["Degree",0.017453292519943295]]
Fiona-1.8.21/tests/data/gre.shp
[binary shapefile geometry; raw bytes omitted]
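An aside on the gre.* fixture above (not part of the archive): the .cpg sidecar advertises UTF-8, so the localized name fields decode cleanly. A minimal sketch, assuming the fixture path below and a checkout of this repo:

import fiona

# The .cpg file already declares UTF-8; passing encoding= simply makes the
# expectation explicit.
with fiona.open("tests/data/gre.shp", encoding="utf-8") as src:
    feature = next(iter(src))
    print(feature["properties"]["name"])     # 'Grenada'
    print(feature["properties"]["name_ru"])  # 'Гренада'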
Fiona-1.8.21/tests/data/test_tin.shp000066400000000000000000000004201420023252700172500ustar00rootroot00000000000000' ???R???????Fiona-1.8.21/tests/data/test_tin.shx000066400000000000000000000001541420023252700172640ustar00rootroot00000000000000' 6???2RFiona-1.8.21/tests/data/test_tz.geojson000066400000000000000000000004371420023252700177750ustar00rootroot00000000000000{ "type": "FeatureCollection", "features": [ { "type": "Feature", "properties": { "test": "2015-04-22T00:00:00+07:00" }, "geometry": { "type": "Point", "coordinates": [ -79.4, 43.6 ] } } ] } Fiona-1.8.21/tests/test__env.py000066400000000000000000000075251420023252700163470ustar00rootroot00000000000000"""Tests of _env util module""" import pytest try: from unittest import mock except ImportError: import mock from fiona._env import GDALDataFinder, PROJDataFinder from .conftest import gdal_version @pytest.fixture def mock_wheel(tmpdir): """A fake rasterio wheel""" moduledir = tmpdir.mkdir("rasterio") moduledir.ensure("__init__.py") moduledir.ensure("_env.py") moduledir.ensure("gdal_data/header.dxf") moduledir.ensure("proj_data/epsg") return moduledir @pytest.fixture def mock_fhs(tmpdir): """A fake FHS system""" tmpdir.ensure("share/gdal/header.dxf") tmpdir.ensure("share/proj/epsg") return tmpdir @pytest.fixture def mock_debian(tmpdir): """A fake Debian multi-install system""" tmpdir.ensure("share/gdal/{}.{}/header.dxf".format(gdal_version.major, gdal_version.minor)) tmpdir.ensure("share/proj/epsg") return tmpdir def test_search_wheel_gdal_data_failure(tmpdir): """Fail to find GDAL data in a non-wheel""" finder = GDALDataFinder() assert not finder.search_wheel(str(tmpdir)) def test_search_wheel_gdal_data(mock_wheel): """Find GDAL data in a wheel""" finder = GDALDataFinder() assert finder.search_wheel(str(mock_wheel.join("_env.py"))) == str(mock_wheel.join("gdal_data")) def test_search_prefix_gdal_data_failure(tmpdir): """Fail to find GDAL data in a bogus prefix""" finder = GDALDataFinder() assert not finder.search_prefix(str(tmpdir)) def test_search_prefix_gdal_data(mock_fhs): """Find GDAL data under prefix""" finder = GDALDataFinder() assert finder.search_prefix(str(mock_fhs)) == str(mock_fhs.join("share").join("gdal")) def test_search_debian_gdal_data_failure(tmpdir): """Fail to find GDAL data in a bogus Debian location""" finder = GDALDataFinder() assert not finder.search_debian(str(tmpdir)) def test_search_debian_gdal_data(mock_debian): """Find GDAL data under Debian locations""" finder = GDALDataFinder() assert finder.search_debian(str(mock_debian)) == str(mock_debian.join("share").join("gdal").join("{}.{}".format(gdal_version.major, gdal_version.minor))) def test_search_gdal_data_wheel(mock_wheel): finder = GDALDataFinder() assert finder.search(str(mock_wheel.join("_env.py"))) == str(mock_wheel.join("gdal_data")) def test_search_gdal_data_fhs(mock_fhs): finder = GDALDataFinder() assert finder.search(str(mock_fhs)) == str(mock_fhs.join("share").join("gdal")) def test_search_gdal_data_debian(mock_debian): """Find GDAL data under Debian locations""" finder = GDALDataFinder() assert finder.search(str(mock_debian)) == str(mock_debian.join("share").join("gdal").join("{}.{}".format(gdal_version.major, gdal_version.minor))) def test_search_wheel_proj_data_failure(tmpdir): """Fail to find GDAL data in a non-wheel""" finder = PROJDataFinder() assert not finder.search_wheel(str(tmpdir)) def test_search_wheel_proj_data(mock_wheel): """Find GDAL data in a wheel""" finder = PROJDataFinder() assert 
finder.search_wheel(str(mock_wheel.join("_env.py"))) == str(mock_wheel.join("proj_data")) def test_search_prefix_proj_data_failure(tmpdir): """Fail to find GDAL data in a bogus prefix""" finder = PROJDataFinder() assert not finder.search_prefix(str(tmpdir)) def test_search_prefix_proj_data(mock_fhs): """Find GDAL data under prefix""" finder = PROJDataFinder() assert finder.search_prefix(str(mock_fhs)) == str(mock_fhs.join("share").join("proj")) def test_search_proj_data_wheel(mock_wheel): finder = PROJDataFinder() assert finder.search(str(mock_wheel.join("_env.py"))) == str(mock_wheel.join("proj_data")) def test_search_proj_data_fhs(mock_fhs): finder = PROJDataFinder() assert finder.search(str(mock_fhs)) == str(mock_fhs.join("share").join("proj")) Fiona-1.8.21/tests/test_bigint.py000066400000000000000000000053641420023252700166730ustar00rootroot00000000000000"""OGR 64bit handling: https://trac.osgeo.org/gdal/wiki/rfc31_ogr_64 Shapefile: OFTInteger fields are created by default with a width of 9 characters, so to be unambiguously read as OFTInteger (and if specifying integer that require 10 or 11 characters. the field is dynamically extended like managed since a few versions). OFTInteger64 fields are created by default with a width of 18 digits, so to be unambiguously read as OFTInteger64, and extented to 19 or 20 if needed. Integer fields of width between 10 and 18 will be read as OFTInteger64. Above they will be treated as OFTReal. In previous GDAL versions, Integer fields were created with a default with of 10, and thus will be now read as OFTInteger64. An open option, DETECT_TYPE=YES, can be specified so as OGR does a full scan of the DBF file to see if integer fields of size 10 or 11 hold 32 bit or 64 bit values and adjust the type accordingly (and same for integer fields of size 19 or 20, in case of overflow of 64 bit integer, OFTReal is chosen) """ import pytest import fiona from fiona.env import calc_gdal_version_num, get_gdal_version_num @pytest.mark.xfail(fiona.gdal_version.major < 2, reason="64-bit integer fields require GDAL 2+") def testCreateBigIntSchema(tmpdir): name = str(tmpdir.join('output1.shp')) a_bigint = 10 ** 18 - 1 fieldname = 'abigint' kwargs = { 'driver': 'ESRI Shapefile', 'crs': 'EPSG:4326', 'schema': { 'geometry': 'Point', 'properties': [(fieldname, 'int:10')]}} with fiona.open(name, 'w', **kwargs) as dst: rec = {} rec['geometry'] = {'type': 'Point', 'coordinates': (0, 0)} rec['properties'] = {fieldname: a_bigint} dst.write(rec) with fiona.open(name) as src: if fiona.gdal_version >= (2, 0, 0): first = next(iter(src)) assert first['properties'][fieldname] == a_bigint @pytest.mark.skipif(get_gdal_version_num() < calc_gdal_version_num(2, 0, 0), reason="Test requires GDAL 2+") @pytest.mark.parametrize('dtype', ['int', 'int64']) def test_issue691(tmpdir, dtype): """Type 'int' maps to 'int64'""" schema = {'geometry': 'Any', 'properties': {'foo': dtype}} with fiona.open( str(tmpdir.join('test.shp')), 'w', driver='Shapefile', schema=schema, crs='epsg:4326') as dst: dst.write({ 'type': 'Feature', 'geometry': {'type': 'Point', 'coordinates': (-122.278015, 37.868995)}, 'properties': {'foo': 3694063472}}) with fiona.open(str(tmpdir.join('test.shp'))) as src: assert src.schema['properties']['foo'] == 'int:18' first = next(iter(src)) assert first['properties']['foo'] == 3694063472 Fiona-1.8.21/tests/test_binary_field.py000066400000000000000000000021531420023252700200370ustar00rootroot00000000000000import fiona import pytest import struct from collections import OrderedDict 
from .conftest import requires_gpkg @requires_gpkg def test_binary_field(tmpdir): meta = { "driver": "GPKG", "schema": { "geometry": "Point", "properties": OrderedDict([ ("name", "str"), ("data", "bytes"), ]) } } # create some binary data input_data = struct.pack("256B", *range(256)) # write the binary data to a BLOB field filename = str(tmpdir.join("binary_test.gpkg")) with fiona.open(filename, "w", **meta) as dst: feature = { "geometry": {"type": "Point", "coordinates": ((0, 0))}, "properties": { "name": "test", u"data": input_data, } } dst.write(feature) # read the data back and check consistency with fiona.open(filename, "r") as src: feature = next(iter(src)) assert feature["properties"]["name"] == "test" output_data = feature["properties"]["data"] assert output_data == input_data Fiona-1.8.21/tests/test_bounds.py000066400000000000000000000043241420023252700167040ustar00rootroot00000000000000import pytest import fiona from fiona.drvsupport import supported_drivers, _driver_supports_mode from fiona.errors import DriverError from .conftest import driver_extensions from fiona.env import GDALVersion def test_bounds_point(): g = {'type': 'Point', 'coordinates': [10, 10]} assert fiona.bounds(g) == (10, 10, 10, 10) def test_bounds_line(): g = {'type': 'LineString', 'coordinates': [[0, 0], [10, 10]]} assert fiona.bounds(g) == (0, 0, 10, 10) def test_bounds_polygon(): g = {'type': 'Polygon', 'coordinates': [[[0, 0], [10, 10], [10, 0]]]} assert fiona.bounds(g) == (0, 0, 10, 10) def test_bounds_z(): g = {'type': 'Point', 'coordinates': [10, 10, 10]} assert fiona.bounds(g) == (10, 10, 10, 10) ignore_write_drivers = set(['CSV', 'GPX', 'GPSTrackMaker', 'DXF', 'DGN', 'MapInfo File']) write_drivers = [driver for driver, raw in supported_drivers.items() if _driver_supports_mode(driver, 'w') and driver not in ignore_write_drivers] @pytest.mark.parametrize('driver', write_drivers) def test_bounds(tmpdir, driver): """Test if bounds are correctly calculated after writing """ if driver == 'BNA' and GDALVersion.runtime() < GDALVersion(2, 0): # BNA driver segfaults with gdal 1.11 return extension = driver_extensions.get(driver, "bar") path = str(tmpdir.join('foo.{}'.format(extension))) with fiona.open(path, 'w', driver=driver, schema={'geometry': 'Point', 'properties': [('title', 'str')]}, fiona_force_driver=True) as c: c.writerecords([{'geometry': {'type': 'Point', 'coordinates': (1.0, 10.0)}, 'properties': {'title': 'One'}}]) try: bounds = c.bounds assert bounds == (1.0, 10.0, 1.0, 10.0) except Exception as e: assert isinstance(e, DriverError) c.writerecords([{'geometry': {'type': 'Point', 'coordinates': (2.0, 20.0)}, 'properties': {'title': 'Two'}}]) try: bounds = c.bounds assert bounds == (1.0, 10.0, 2.0, 20.0) except Exception as e: assert isinstance(e, DriverError) Fiona-1.8.21/tests/test_bytescollection.py000066400000000000000000000157751420023252700206300ustar00rootroot00000000000000"""Tests for ``fiona.BytesCollection()``.""" import pytest import six import fiona class TestReading(object): @pytest.fixture(autouse=True) def bytes_collection_object(self, path_coutwildrnp_json): with open(path_coutwildrnp_json) as src: bytesbuf = src.read().encode('utf-8') self.c = fiona.BytesCollection(bytesbuf, encoding="utf-8") yield self.c.close() @pytest.mark.skipif(six.PY2, reason='string are bytes in Python 2') def test_construct_with_str(self, path_coutwildrnp_json): with open(path_coutwildrnp_json) as src: strbuf = src.read() with pytest.raises(ValueError): fiona.BytesCollection(strbuf) def 
test_open_repr(self): # I'm skipping checking the name of the virtual file as it produced by uuid. print(repr(self.c)) assert repr(self.c).startswith(" 0 def test_mode(self): assert self.c.mode == 'r' def test_collection(self): assert self.c.encoding == 'utf-8' def test_iter(self): assert iter(self.c) def test_closed_no_iter(self): self.c.close() with pytest.raises(ValueError): iter(self.c) def test_len(self): assert len(self.c) == 67 def test_closed_len(self): # Len is lazy, it's never computed in this case. TODO? self.c.close() assert len(self.c) == 0 def test_len_closed_len(self): # Lazy len is computed in this case and sticks. len(self.c) self.c.close() assert len(self.c) == 67 def test_driver(self): assert self.c.driver == "GeoJSON" def test_closed_driver(self): self.c.close() assert self.c.driver is None def test_driver_closed_driver(self): self.c.driver self.c.close() assert self.c.driver == "GeoJSON" def test_schema(self): s = self.c.schema['properties'] assert s['PERIMETER'] == "float" assert s['NAME'] == "str" assert s['URL'] == "str" assert s['STATE_FIPS'] == "str" assert s['WILDRNP020'] == "int" def test_closed_schema(self): # Schema is lazy too, never computed in this case. TODO? self.c.close() assert self.c.schema is None def test_schema_closed_schema(self): self.c.schema self.c.close() assert sorted(self.c.schema.keys()) == ['geometry', 'properties'] def test_crs(self): assert self.c.crs['init'] == 'epsg:4326' def test_crs_wkt(self): assert self.c.crs_wkt.startswith('GEOGCS["WGS 84"') def test_closed_crs(self): # Crs is lazy too, never computed in this case. TODO? self.c.close() assert self.c.crs is None def test_crs_closed_crs(self): self.c.crs self.c.close() assert sorted(self.c.crs.keys()) == ['init'] def test_meta(self): assert (sorted(self.c.meta.keys()) == ['crs', 'crs_wkt', 'driver', 'schema']) def test_bounds(self): assert self.c.bounds[0] == pytest.approx(-113.564247) assert self.c.bounds[1] == pytest.approx(37.068981) assert self.c.bounds[2] == pytest.approx(-104.970871) assert self.c.bounds[3] == pytest.approx(41.996277) def test_iter_one(self): itr = iter(self.c) f = next(itr) assert f['id'] == "0" assert f['properties']['STATE'] == 'UT' def test_iter_list(self): f = list(self.c)[0] assert f['id'] == "0" assert f['properties']['STATE'] == 'UT' def test_re_iter_list(self): f = list(self.c)[0] # Run through iterator f = list(self.c)[0] # Run through a new, reset iterator assert f['id'] == "0" assert f['properties']['STATE'] == 'UT' def test_getitem_one(self): f = self.c[0] assert f['id'] == "0" assert f['properties']['STATE'] == 'UT' def test_no_write(self): with pytest.raises(IOError): self.c.write({}) def test_iter_items_list(self): i, f = list(self.c.items())[0] assert i == 0 assert f['id'] == "0" assert f['properties']['STATE'] == 'UT' def test_iter_keys_list(self): i = list(self.c.keys())[0] assert i == 0 def test_in_keys(self): assert 0 in self.c.keys() assert 0 in self.c class TestFilterReading(object): @pytest.fixture(autouse=True) def bytes_collection_object(self, path_coutwildrnp_json): with open(path_coutwildrnp_json) as src: bytesbuf = src.read().encode('utf-8') self.c = fiona.BytesCollection(bytesbuf) yield self.c.close() def test_filter_1(self): results = list(self.c.filter(bbox=(-120.0, 30.0, -100.0, 50.0))) assert len(results) == 67 f = results[0] assert f['id'] == "0" assert f['properties']['STATE'] == 'UT' def test_filter_reset(self): results = list(self.c.filter(bbox=(-112.0, 38.0, -106.0, 40.0))) assert len(results) == 26 results = 
list(self.c.filter()) assert len(results) == 67 def test_filter_mask(self): mask = { 'type': 'Polygon', 'coordinates': ( ((-112, 38), (-112, 40), (-106, 40), (-106, 38), (-112, 38)),)} results = list(self.c.filter(mask=mask)) assert len(results) == 26 def test_zipped_bytes_collection(bytes_coutwildrnp_zip): """Open a zipped stream of bytes as a collection""" with fiona.BytesCollection(bytes_coutwildrnp_zip) as col: assert col.name == 'coutwildrnp' assert len(col) == 67 @pytest.mark.skipif(fiona.gdal_version >= (2, 3, 0), reason="Changed behavior with gdal 2.3, possibly related to RFC 70:" "Guessing output format from output file name extension for utilities") def test_grenada_bytes_geojson(bytes_grenada_geojson): """Read grenada.geojson as BytesCollection. grenada.geojson is an example of geojson that GDAL's GeoJSON driver will fail to read successfully unless the file's extension reflects its json'ness. """ # We expect an exception if the GeoJSON driver isn't specified. with pytest.raises(fiona.errors.FionaValueError): with fiona.BytesCollection(bytes_grenada_geojson) as col: pass # If told what driver to use, we should be good. with fiona.BytesCollection(bytes_grenada_geojson, driver='GeoJSON') as col: assert len(col) == 1 Fiona-1.8.21/tests/test_collection.py000066400000000000000000001020151420023252700175410ustar00rootroot00000000000000# Testing collections and workspaces import datetime import os import random import sys import re from collections import OrderedDict import pytest import fiona from fiona.collection import Collection from fiona.env import getenv, GDALVersion from fiona.errors import FionaValueError, DriverError, FionaDeprecationWarning from .conftest import WGS84PATTERN, get_temp_filename from fiona.drvsupport import supported_drivers, driver_mode_mingdal class TestSupportedDrivers(object): def test_shapefile(self): assert "ESRI Shapefile" in supported_drivers assert set(supported_drivers["ESRI Shapefile"]) == set("raw") def test_map(self): assert "MapInfo File" in supported_drivers assert set(supported_drivers["MapInfo File"]) == set("raw") class TestCollectionArgs(object): def test_path(self): with pytest.raises(TypeError): Collection(0) def test_mode(self): with pytest.raises(TypeError): Collection("foo", mode=0) def test_driver(self): with pytest.raises(TypeError): Collection("foo", mode='w', driver=1) def test_schema(self): with pytest.raises(TypeError): Collection("foo", mode='w', driver="ESRI Shapefile", schema=1) def test_crs(self): with pytest.raises(TypeError): Collection("foo", mode='w', driver="ESRI Shapefile", schema=0, crs=1) def test_encoding(self): with pytest.raises(TypeError): Collection("foo", mode='r', encoding=1) def test_layer(self): with pytest.raises(TypeError): Collection("foo", mode='r', layer=0.5) def test_vsi(self): with pytest.raises(TypeError): Collection("foo", mode='r', vsi='git') def test_archive(self): with pytest.raises(TypeError): Collection("foo", mode='r', archive=1) def test_write_numeric_layer(self): with pytest.raises(ValueError): Collection("foo", mode='w', layer=1) def test_write_geojson_layer(self): with pytest.raises(ValueError): Collection("foo", mode='w', driver='GeoJSON', layer='foo') def test_append_geojson(self): with pytest.raises(ValueError): Collection("foo", mode='w', driver='ARCGEN') class TestOpenException(object): def test_no_archive(self): with pytest.warns(FionaDeprecationWarning), pytest.raises(DriverError): fiona.open("/", mode='r', vfs="zip:///foo.zip") class TestReading(object): @pytest.fixture(autouse=True) 
def shapefile(self, path_coutwildrnp_shp): self.c = fiona.open(path_coutwildrnp_shp, "r") yield self.c.close() def test_open_repr(self, path_coutwildrnp_shp): assert ( repr(self.c) == ("".format(hexid=hex(id(self.c)), path=path_coutwildrnp_shp))) def test_closed_repr(self, path_coutwildrnp_shp): self.c.close() assert ( repr(self.c) == ("".format(hexid=hex(id(self.c)), path=path_coutwildrnp_shp))) def test_path(self, path_coutwildrnp_shp): assert self.c.path == path_coutwildrnp_shp def test_name(self): assert self.c.name == 'coutwildrnp' def test_mode(self): assert self.c.mode == 'r' def test_encoding(self): assert self.c.encoding is None def test_iter(self): assert iter(self.c) def test_closed_no_iter(self): self.c.close() with pytest.raises(ValueError): iter(self.c) def test_len(self): assert len(self.c) == 67 def test_closed_len(self): # Len is lazy, it's never computed in this case. TODO? self.c.close() assert len(self.c) == 0 def test_len_closed_len(self): # Lazy len is computed in this case and sticks. len(self.c) self.c.close() assert len(self.c) == 67 def test_driver(self): assert self.c.driver == "ESRI Shapefile" def test_closed_driver(self): self.c.close() assert self.c.driver is None def test_driver_closed_driver(self): self.c.driver self.c.close() assert self.c.driver == "ESRI Shapefile" def test_schema(self): s = self.c.schema['properties'] assert s['PERIMETER'] == "float:24.15" assert s['NAME'] == "str:80" assert s['URL'] == "str:101" assert s['STATE_FIPS'] == "str:80" assert s['WILDRNP020'] == "int:10" def test_closed_schema(self): # Schema is lazy too, never computed in this case. TODO? self.c.close() assert self.c.schema is None def test_schema_closed_schema(self): self.c.schema self.c.close() assert sorted(self.c.schema.keys()) == ['geometry', 'properties'] def test_crs(self): crs = self.c.crs assert crs['init'] == 'epsg:4326' def test_crs_wkt(self): crs = self.c.crs_wkt assert re.match(WGS84PATTERN, crs) def test_closed_crs(self): # Crs is lazy too, never computed in this case. TODO? 
self.c.close() assert self.c.crs is None def test_crs_closed_crs(self): self.c.crs self.c.close() assert sorted(self.c.crs.keys()) == ['init'] def test_meta(self): assert (sorted(self.c.meta.keys()) == ['crs', 'crs_wkt', 'driver', 'schema']) def test_profile(self): assert (sorted(self.c.profile.keys()) == ['crs', 'crs_wkt', 'driver', 'schema']) def test_bounds(self): assert self.c.bounds[0] == pytest.approx(-113.564247) assert self.c.bounds[1] == pytest.approx(37.068981) assert self.c.bounds[2] == pytest.approx(-104.970871) assert self.c.bounds[3] == pytest.approx(41.996277) def test_context(self, path_coutwildrnp_shp): with fiona.open(path_coutwildrnp_shp, "r") as c: assert c.name == 'coutwildrnp' assert len(c) == 67 assert c.crs assert c.closed def test_iter_one(self): itr = iter(self.c) f = next(itr) assert f['id'] == "0" assert f['properties']['STATE'] == 'UT' def test_iter_list(self): f = list(self.c)[0] assert f['id'] == "0" assert f['properties']['STATE'] == 'UT' def test_re_iter_list(self): f = list(self.c)[0] # Run through iterator f = list(self.c)[0] # Run through a new, reset iterator assert f['id'] == "0" assert f['properties']['STATE'] == 'UT' def test_getitem_one(self): f = self.c[0] assert f['id'] == "0" assert f['properties']['STATE'] == 'UT' def test_getitem_iter_combo(self): i = iter(self.c) f = next(i) f = next(i) assert f['id'] == "1" f = self.c[0] assert f['id'] == "0" f = next(i) assert f['id'] == "2" def test_no_write(self): with pytest.raises(IOError): self.c.write({}) def test_iter_items_list(self): i, f = list(self.c.items())[0] assert i == 0 assert f['id'] == "0" assert f['properties']['STATE'] == 'UT' def test_iter_keys_list(self): i = list(self.c.keys())[0] assert i == 0 def test_in_keys(self): assert 0 in self.c.keys() assert 0 in self.c class TestReadingPathTest(object): def test_open_path(self, path_coutwildrnp_shp): pathlib = pytest.importorskip("pathlib") with fiona.open(pathlib.Path(path_coutwildrnp_shp)) as collection: assert collection.name == 'coutwildrnp' @pytest.mark.usefixtures("unittest_path_coutwildrnp_shp") class TestIgnoreFieldsAndGeometry(object): def test_without_ignore(self): with fiona.open(self.path_coutwildrnp_shp, "r") as collection: assert("AREA" in collection.schema["properties"].keys()) assert("STATE" in collection.schema["properties"].keys()) assert("NAME" in collection.schema["properties"].keys()) assert("geometry" in collection.schema.keys()) feature = next(iter(collection)) assert(feature["properties"]["AREA"] is not None) assert(feature["properties"]["STATE"] is not None) assert(feature["properties"]["NAME"] is not None) assert(feature["geometry"] is not None) def test_ignore_fields(self): with fiona.open(self.path_coutwildrnp_shp, "r", ignore_fields=["AREA", "STATE"]) as collection: assert("AREA" not in collection.schema["properties"].keys()) assert("STATE" not in collection.schema["properties"].keys()) assert("NAME" in collection.schema["properties"].keys()) assert("geometry" in collection.schema.keys()) feature = next(iter(collection)) assert("AREA" not in feature["properties"].keys()) assert("STATE" not in feature["properties"].keys()) assert(feature["properties"]["NAME"] is not None) assert(feature["geometry"] is not None) def test_ignore_invalid_field_missing(self): with fiona.open(self.path_coutwildrnp_shp, "r", ignore_fields=["DOES_NOT_EXIST"]): pass def test_ignore_invalid_field_not_string(self): with pytest.raises(TypeError): with fiona.open(self.path_coutwildrnp_shp, "r", ignore_fields=[42]): pass def 
test_ignore_geometry(self): with fiona.open(self.path_coutwildrnp_shp, "r", ignore_geometry=True) as collection: assert("AREA" in collection.schema["properties"].keys()) assert("STATE" in collection.schema["properties"].keys()) assert("NAME" in collection.schema["properties"].keys()) assert("geometry" not in collection.schema.keys()) feature = next(iter(collection)) assert(feature["properties"]["AREA"] is not None) assert(feature["properties"]["STATE"] is not None) assert(feature["properties"]["NAME"] is not None) assert("geometry" not in feature.keys()) class TestFilterReading(object): @pytest.fixture(autouse=True) def shapefile(self, path_coutwildrnp_shp): self.c = fiona.open(path_coutwildrnp_shp, "r") yield self.c.close() def test_filter_1(self): results = list(self.c.filter(bbox=(-120.0, 30.0, -100.0, 50.0))) assert len(results) == 67 f = results[0] assert f['id'] == "0" assert f['properties']['STATE'] == 'UT' def test_filter_reset(self): results = list(self.c.filter(bbox=(-112.0, 38.0, -106.0, 40.0))) assert len(results) == 26 results = list(self.c.filter()) assert len(results) == 67 def test_filter_mask(self): mask = { 'type': 'Polygon', 'coordinates': ( ((-112, 38), (-112, 40), (-106, 40), (-106, 38), (-112, 38)),)} results = list(self.c.filter(mask=mask)) assert len(results) == 26 class TestUnsupportedDriver(object): def test_immediate_fail_driver(self, tmpdir): schema = { 'geometry': 'Point', 'properties': {'label': 'str', u'verit\xe9': 'int'}} with pytest.raises(DriverError): fiona.open(str(tmpdir.join("foo")), "w", "Bogus", schema=schema) @pytest.mark.iconv class TestGenericWritingTest(object): @pytest.fixture(autouse=True) def no_iter_shp(self, tmpdir): schema = { 'geometry': 'Point', 'properties': [('label', 'str'), (u'verit\xe9', 'int')]} self.c = fiona.open(str(tmpdir.join("test-no-iter.shp")), 'w', driver="ESRI Shapefile", schema=schema, encoding='Windows-1252') yield self.c.close() def test_encoding(self): assert self.c.encoding == 'Windows-1252' def test_no_iter(self): with pytest.raises(IOError): iter(self.c) def test_no_filter(self): with pytest.raises(IOError): self.c.filter() class TestPropertiesNumberFormatting(object): @pytest.fixture(autouse=True) def shapefile(self, tmpdir): self.filename = str(tmpdir.join("properties_number_formatting_test")) _records_with_float_property1 = [ { 'geometry': {'type': 'Point', 'coordinates': (0.0, 0.1)}, 'properties': {'property1': 12.22} }, { 'geometry': {'type': 'Point', 'coordinates': (0.0, 0.2)}, 'properties': {'property1': 12.88} } ] _records_with_float_property1_as_string = [ { 'geometry': {'type': 'Point', 'coordinates': (0.0, 0.1)}, 'properties': {'property1': '12.22'} }, { 'geometry': {'type': 'Point', 'coordinates': (0.0, 0.2)}, 'properties': {'property1': '12.88'} } ] _records_with_invalid_number_property1 = [ { 'geometry': {'type': 'Point', 'coordinates': (0.0, 0.3)}, 'properties': {'property1': 'invalid number'} } ] def _write_collection(self, records, schema, driver): with fiona.open( self.filename, "w", driver=driver, schema=schema, crs='epsg:4326', encoding='utf-8' ) as c: c.writerecords(records) def test_shape_driver_truncates_float_property_to_requested_int_format(self): driver = "ESRI Shapefile" self._write_collection( self._records_with_float_property1, {'geometry': 'Point', 'properties': [('property1', 'int')]}, driver ) with fiona.open(self.filename, driver=driver, encoding='utf-8') as c: assert 2 == len(c) rf1, rf2 = list(c) assert 12 == rf1['properties']['property1'] assert 12 == 
class TestUnsupportedDriver(object):

    def test_immediate_fail_driver(self, tmpdir):
        schema = {
            'geometry': 'Point',
            'properties': {'label': 'str', u'verit\xe9': 'int'}}
        with pytest.raises(DriverError):
            fiona.open(str(tmpdir.join("foo")), "w", "Bogus", schema=schema)


@pytest.mark.iconv
class TestGenericWritingTest(object):

    @pytest.fixture(autouse=True)
    def no_iter_shp(self, tmpdir):
        schema = {
            'geometry': 'Point',
            'properties': [('label', 'str'), (u'verit\xe9', 'int')]}
        self.c = fiona.open(str(tmpdir.join("test-no-iter.shp")),
                            'w', driver="ESRI Shapefile", schema=schema,
                            encoding='Windows-1252')
        yield
        self.c.close()

    def test_encoding(self):
        assert self.c.encoding == 'Windows-1252'

    def test_no_iter(self):
        with pytest.raises(IOError):
            iter(self.c)

    def test_no_filter(self):
        with pytest.raises(IOError):
            self.c.filter()


class TestPropertiesNumberFormatting(object):

    @pytest.fixture(autouse=True)
    def shapefile(self, tmpdir):
        self.filename = str(tmpdir.join("properties_number_formatting_test"))

    _records_with_float_property1 = [
        {
            'geometry': {'type': 'Point', 'coordinates': (0.0, 0.1)},
            'properties': {'property1': 12.22}
        },
        {
            'geometry': {'type': 'Point', 'coordinates': (0.0, 0.2)},
            'properties': {'property1': 12.88}
        }
    ]

    _records_with_float_property1_as_string = [
        {
            'geometry': {'type': 'Point', 'coordinates': (0.0, 0.1)},
            'properties': {'property1': '12.22'}
        },
        {
            'geometry': {'type': 'Point', 'coordinates': (0.0, 0.2)},
            'properties': {'property1': '12.88'}
        }
    ]

    _records_with_invalid_number_property1 = [
        {
            'geometry': {'type': 'Point', 'coordinates': (0.0, 0.3)},
            'properties': {'property1': 'invalid number'}
        }
    ]

    def _write_collection(self, records, schema, driver):
        with fiona.open(
                self.filename,
                "w",
                driver=driver,
                schema=schema,
                crs='epsg:4326',
                encoding='utf-8'
        ) as c:
            c.writerecords(records)

    def test_shape_driver_truncates_float_property_to_requested_int_format(self):
        driver = "ESRI Shapefile"
        self._write_collection(
            self._records_with_float_property1,
            {'geometry': 'Point', 'properties': [('property1', 'int')]},
            driver
        )

        with fiona.open(self.filename, driver=driver, encoding='utf-8') as c:
            assert 2 == len(c)
            rf1, rf2 = list(c)
            assert 12 == rf1['properties']['property1']
            assert 12 == rf2['properties']['property1']

    def test_shape_driver_rounds_float_property_to_requested_digits_number(self):
        driver = "ESRI Shapefile"
        self._write_collection(
            self._records_with_float_property1,
            {'geometry': 'Point', 'properties': [('property1', 'float:15.1')]},
            driver
        )

        with fiona.open(self.filename, driver=driver, encoding='utf-8') as c:
            assert 2 == len(c)
            rf1, rf2 = list(c)
            assert 12.2 == rf1['properties']['property1']
            assert 12.9 == rf2['properties']['property1']

    def test_string_is_converted_to_number_and_truncated_to_requested_int_by_shape_driver(self):
        driver = "ESRI Shapefile"
        self._write_collection(
            self._records_with_float_property1_as_string,
            {'geometry': 'Point', 'properties': [('property1', 'int')]},
            driver
        )

        with fiona.open(self.filename, driver=driver, encoding='utf-8') as c:
            assert 2 == len(c)
            rf1, rf2 = list(c)
            assert 12 == rf1['properties']['property1']
            assert 12 == rf2['properties']['property1']

    def test_string_is_converted_to_number_and_rounded_to_requested_digits_number_by_shape_driver(self):
        driver = "ESRI Shapefile"
        self._write_collection(
            self._records_with_float_property1_as_string,
            {'geometry': 'Point', 'properties': [('property1', 'float:15.1')]},
            driver
        )

        with fiona.open(self.filename, driver=driver, encoding='utf-8') as c:
            assert 2 == len(c)
            rf1, rf2 = list(c)
            assert 12.2 == rf1['properties']['property1']
            assert 12.9 == rf2['properties']['property1']

    def test_invalid_number_is_converted_to_0_and_written_by_shape_driver(self):
        driver = "ESRI Shapefile"
        self._write_collection(
            self._records_with_invalid_number_property1,
            # {'geometry': 'Point', 'properties': [('property1', 'int')]},
            {'geometry': 'Point', 'properties': [('property1', 'float:15.1')]},
            driver
        )

        with fiona.open(self.filename, driver=driver, encoding='utf-8') as c:
            assert 1 == len(c)
            rf1 = c[0]
            assert 0 == rf1['properties']['property1']

    def test_geojson_driver_truncates_float_property_to_requested_int_format(self):
        driver = "GeoJSON"
        self._write_collection(
            self._records_with_float_property1,
            {'geometry': 'Point', 'properties': [('property1', 'int')]},
            driver
        )

        with fiona.open(self.filename, driver=driver, encoding='utf-8') as c:
            assert 2 == len(c)
            rf1, rf2 = list(c)
            assert 12 == rf1['properties']['property1']
            assert 12 == rf2['properties']['property1']

    def test_geojson_driver_does_not_round_float_property_to_requested_digits_number(self):
        driver = "GeoJSON"
        self._write_collection(
            self._records_with_float_property1,
            {'geometry': 'Point', 'properties': [('property1', 'float:15.1')]},
            driver
        )

        with fiona.open(self.filename, driver=driver, encoding='utf-8') as c:
            assert 2 == len(c)
            rf1, rf2 = list(c)
            # ****************************************
            # FLOAT FORMATTING IS NOT RESPECTED...
            assert 12.22 == rf1['properties']['property1']
            assert 12.88 == rf2['properties']['property1']

    def test_string_is_converted_to_number_and_truncated_to_requested_int_by_geojson_driver(self):
        driver = "GeoJSON"
        self._write_collection(
            self._records_with_float_property1_as_string,
            {'geometry': 'Point', 'properties': [('property1', 'int')]},
            driver
        )

        with fiona.open(self.filename, driver=driver, encoding='utf-8') as c:
            assert 2 == len(c)
            rf1, rf2 = list(c)
            assert 12 == rf1['properties']['property1']
            assert 12 == rf2['properties']['property1']

    def test_string_is_converted_to_number_but_not_rounded_to_requested_digits_number_by_geojson_driver(self):
        driver = "GeoJSON"
        self._write_collection(
            self._records_with_float_property1_as_string,
            {'geometry': 'Point', 'properties': [('property1', 'float:15.1')]},
            driver
        )

        with fiona.open(self.filename, driver=driver, encoding='utf-8') as c:
            assert 2 == len(c)
            rf1, rf2 = list(c)
            # ****************************************
            # FLOAT FORMATTING IS NOT RESPECTED...
            assert 12.22 == rf1['properties']['property1']
            assert 12.88 == rf2['properties']['property1']

    def test_invalid_number_is_converted_to_0_and_written_by_geojson_driver(self):
        driver = "GeoJSON"
        self._write_collection(
            self._records_with_invalid_number_property1,
            # {'geometry': 'Point', 'properties': [('property1', 'int')]},
            {'geometry': 'Point', 'properties': [('property1', 'float:15.1')]},
            driver
        )

        with fiona.open(self.filename, driver=driver, encoding='utf-8') as c:
            assert 1 == len(c)
            rf1 = c[0]
            assert 0 == rf1['properties']['property1']
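
# ---------------------------------------------------------------------------
# Editor's note: illustrative sketch, not part of the original test suite.
# The 'float:15.1' / 'str:254' notation used in the schemas above encodes
# the OGR field width (and, for floats, the precision) that the tests rely
# on. A minimal writer under that assumption; the path is a placeholder.
def _sketch_schema_field_formats(path="formats_example.shp"):
    schema = {
        'geometry': 'Point',
        'properties': [
            ('name', 'str:254'),      # string field, width 254
            ('value', 'float:15.1'),  # float field, width 15, precision 1
            ('count', 'int'),         # integer field, default width
        ]}
    with fiona.open(path, 'w', driver="ESRI Shapefile", schema=schema) as dst:
        dst.write({
            'geometry': {'type': 'Point', 'coordinates': (0.0, 0.0)},
            'properties': {'name': 'example', 'value': 12.88, 'count': 1}})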
class TestPointWriting(object):

    @pytest.fixture(autouse=True)
    def shapefile(self, tmpdir):
        self.filename = str(tmpdir.join("point_writing_test.shp"))
        self.sink = fiona.open(
            self.filename,
            "w",
            driver="ESRI Shapefile",
            schema={
                'geometry': 'Point',
                'properties': [('title', 'str'), ('date', 'date')]},
            crs='epsg:4326',
            encoding='utf-8')
        yield
        self.sink.close()

    def test_cpg(self, tmpdir):
        """Requires GDAL 1.9"""
        self.sink.close()
        encoding = tmpdir.join("point_writing_test.cpg").read()
        assert encoding == "UTF-8"

    def test_write_one(self):
        assert len(self.sink) == 0
        assert self.sink.bounds == (0.0, 0.0, 0.0, 0.0)
        f = {
            'geometry': {'type': 'Point', 'coordinates': (0.0, 0.1)},
            'properties': {'title': 'point one', 'date': "2012-01-29"}}
        self.sink.writerecords([f])
        assert len(self.sink) == 1
        assert self.sink.bounds == (0.0, 0.1, 0.0, 0.1)
        self.sink.close()

    def test_write_two(self):
        assert len(self.sink) == 0
        assert self.sink.bounds == (0.0, 0.0, 0.0, 0.0)
        f1 = {
            'geometry': {'type': 'Point', 'coordinates': (0.0, 0.1)},
            'properties': {'title': 'point one', 'date': "2012-01-29"}}
        f2 = {
            'geometry': {'type': 'Point', 'coordinates': (0.0, -0.1)},
            'properties': {'title': 'point two', 'date': "2012-01-29"}}
        self.sink.writerecords([f1, f2])
        assert len(self.sink) == 2
        assert self.sink.bounds == (0.0, -0.1, 0.0, 0.1)

    def test_write_one_null_geom(self):
        assert len(self.sink) == 0
        assert self.sink.bounds == (0.0, 0.0, 0.0, 0.0)
        f = {
            'geometry': None,
            'properties': {'title': 'point one', 'date': "2012-01-29"}}
        self.sink.writerecords([f])
        assert len(self.sink) == 1
        assert self.sink.bounds == (0.0, 0.0, 0.0, 0.0)

    def test_validate_record(self):
        fvalid = {
            'geometry': {'type': 'Point', 'coordinates': (0.0, 0.1)},
            'properties': {'title': 'point one', 'date': "2012-01-29"}}
        finvalid = {
            'geometry': {'type': 'Point', 'coordinates': (0.0, -0.1)},
            'properties': {'not-a-title': 'point two', 'date': "2012-01-29"}}
        assert self.sink.validate_record(fvalid)
        assert not self.sink.validate_record(finvalid)


class TestLineWriting(object):

    @pytest.fixture(autouse=True)
    def shapefile(self, tmpdir):
        self.sink = fiona.open(
            str(tmpdir.join("line_writing_test.shp")),
            "w",
            driver="ESRI Shapefile",
            schema={
                'geometry': 'LineString',
                'properties': [('title', 'str'), ('date', 'date')]},
            crs={'init': "epsg:4326", 'no_defs': True})
        yield
        self.sink.close()

    def test_write_one(self):
        assert len(self.sink) == 0
        assert self.sink.bounds == (0.0, 0.0, 0.0, 0.0)
        f = {
            'geometry': {'type': 'LineString',
                         'coordinates': [(0.0, 0.1), (0.0, 0.2)]},
            'properties': {'title': 'line one', 'date': "2012-01-29"}}
        self.sink.writerecords([f])
        assert len(self.sink) == 1
        assert self.sink.bounds == (0.0, 0.1, 0.0, 0.2)

    def test_write_two(self):
        assert len(self.sink) == 0
        assert self.sink.bounds == (0.0, 0.0, 0.0, 0.0)
        f1 = {
            'geometry': {'type': 'LineString',
                         'coordinates': [(0.0, 0.1), (0.0, 0.2)]},
            'properties': {'title': 'line one', 'date': "2012-01-29"}}
        f2 = {
            'geometry': {'type': 'MultiLineString',
                         'coordinates': [[(0.0, 0.0), (0.0, -0.1)],
                                         [(0.0, -0.1), (0.0, -0.2)]]},
            'properties': {'title': 'line two', 'date': "2012-01-29"}}
        self.sink.writerecords([f1, f2])
        assert len(self.sink) == 2
        assert self.sink.bounds == (0.0, -0.2, 0.0, 0.2)


class TestPointAppend(object):

    @pytest.fixture(autouse=True)
    def shapefile(self, tmpdir, path_coutwildrnp_shp):
        with fiona.open(path_coutwildrnp_shp, "r") as input:
            output_schema = input.schema
            output_schema['geometry'] = '3D Point'
            with fiona.open(
                    str(tmpdir.join("test_append_point.shp")),
                    'w', crs=None, driver="ESRI Shapefile",
                    schema=output_schema) as output:
                for f in input:
                    f['geometry'] = {
                        'type': 'Point',
                        'coordinates': f['geometry']['coordinates'][0][0]}
                    output.write(f)

    def test_append_point(self, tmpdir):
        with fiona.open(str(tmpdir.join("test_append_point.shp")), "a") as c:
            assert c.schema['geometry'] == '3D Point'
            c.write({'geometry': {'type': 'Point',
                                  'coordinates': (0.0, 45.0)},
                     'properties': {'PERIMETER': 1.0,
                                    'FEATURE2': None,
                                    'NAME': 'Foo',
                                    'FEATURE1': None,
                                    'URL': 'http://example.com',
                                    'AGBUR': 'BAR',
                                    'AREA': 0.0,
                                    'STATE_FIPS': 1,
                                    'WILDRNP020': 1,
                                    'STATE': 'XL'}})
            assert len(c) == 68


class TestLineAppend(object):

    @pytest.fixture(autouse=True)
    def shapefile(self, tmpdir):
        with fiona.open(
                str(tmpdir.join("test_append_line.shp")),
                "w",
                driver="ESRI Shapefile",
                schema={
                    'geometry': 'MultiLineString',
                    'properties': {'title': 'str', 'date': 'date'}},
                crs={'init': "epsg:4326", 'no_defs': True}) as output:
            f = {'geometry': {'type': 'MultiLineString',
                              'coordinates': [[(0.0, 0.1), (0.0, 0.2)]]},
                 'properties': {'title': 'line one', 'date': "2012-01-29"}}
            output.writerecords([f])

    def test_append_line(self, tmpdir):
        with fiona.open(str(tmpdir.join("test_append_line.shp")), "a") as c:
            assert c.schema['geometry'] == 'LineString'
            f1 = {
                'geometry': {'type': 'LineString',
                             'coordinates': [(0.0, 0.1), (0.0, 0.2)]},
                'properties': {'title': 'line one', 'date': "2012-01-29"}}
            f2 = {
                'geometry': {'type': 'MultiLineString',
                             'coordinates': [[(0.0, 0.0), (0.0, -0.1)],
                                             [(0.0, -0.1), (0.0, -0.2)]]},
                'properties': {'title': 'line two', 'date': "2012-01-29"}}
            c.writerecords([f1, f2])
            assert len(c) == 3
            assert c.bounds == (0.0, -0.2, 0.0, 0.2)
"r") assert c.schema['properties']['text'] == 'str:254' f = next(iter(c)) assert f['properties']['text'] == 'a' * 254 c.close() class TestCollection(object): def test_invalid_mode(self, tmpdir): with pytest.raises(ValueError): fiona.open(str(tmpdir.join("bogus.shp")), "r+") def test_w_args(self, tmpdir): with pytest.raises(FionaValueError): fiona.open(str(tmpdir.join("test-no-iter.shp")), "w") with pytest.raises(FionaValueError): fiona.open(str(tmpdir.join("test-no-iter.shp")), "w", "Driver") def test_no_path(self): with pytest.raises(Exception): fiona.open("no-path.shp", "a") def test_no_read_conn_str(self): with pytest.raises(DriverError): fiona.open("PG:dbname=databasename", "r") @pytest.mark.skipif(sys.platform.startswith("win"), reason="test only for *nix based system") def test_no_read_directory(self): with pytest.raises(DriverError): fiona.open("/dev/null", "r") def test_date(tmpdir): name = str(tmpdir.join("date_test.shp")) sink = fiona.open( name, "w", driver="ESRI Shapefile", schema={ 'geometry': 'Point', 'properties': [('id', 'int'), ('date', 'date')]}, crs={'init': "epsg:4326", 'no_defs': True}) recs = [{ 'geometry': {'type': 'Point', 'coordinates': (7.0, 50.0)}, 'properties': {'id': 1, 'date': '2013-02-25'} }, { 'geometry': {'type': 'Point', 'coordinates': (7.0, 50.2)}, 'properties': {'id': 1, 'date': datetime.date(2014, 2, 3)} }] sink.writerecords(recs) sink.close() assert len(sink) == 2 with fiona.open(name, "r") as c: assert len(c) == 2 rf1, rf2 = list(c) assert rf1['properties']['date'] == '2013-02-25' assert rf2['properties']['date'] == '2014-02-03' def test_open_kwargs(tmpdir, path_coutwildrnp_shp): dstfile = str(tmpdir.join('test.json')) with fiona.open(path_coutwildrnp_shp) as src: kwds = src.profile kwds['driver'] = 'GeoJSON' kwds['coordinate_precision'] = 2 with fiona.open(dstfile, 'w', **kwds) as dst: dst.writerecords(ftr for ftr in src) with open(dstfile) as f: assert '"coordinates": [ [ [ -111.74, 42.0 ], [ -111.66, 42.0 ]' in \ f.read(2000) @pytest.mark.network def test_collection_http(): ds = fiona.Collection( "https://raw.githubusercontent.com/Toblerity/Fiona/master/tests/data/coutwildrnp.shp", vsi="https", ) assert ( ds.path == "/vsicurl/https://raw.githubusercontent.com/Toblerity/Fiona/master/tests/data/coutwildrnp.shp" ) assert len(ds) == 67 @pytest.mark.network def test_collection_zip_http(): ds = fiona.Collection( "https://raw.githubusercontent.com/Toblerity/Fiona/master/tests/data/coutwildrnp.zip", vsi="zip+https", ) assert ( ds.path == "/vsizip/vsicurl/https://raw.githubusercontent.com/Toblerity/Fiona/master/tests/data/coutwildrnp.zip" ) assert len(ds) == 67 def test_encoding_option_warning(tmpdir, caplog): """There is no ENCODING creation option log warning for GeoJSON""" fiona.Collection(str(tmpdir.join("test.geojson")), "w", driver="GeoJSON", crs="epsg:4326", schema={"geometry": "Point", "properties": {"foo": "int"}}) assert not caplog.text def test_closed_session_next(path_coutwildrnp_shp): """Confirm fix for issue #687""" src = fiona.open(path_coutwildrnp_shp) itr = iter(src) list(itr) src.close() with pytest.raises(FionaValueError): next(itr) def test_collection_no_env(path_coutwildrnp_shp): """We have no GDAL env left over from open""" collection = fiona.open(path_coutwildrnp_shp) assert collection with pytest.raises(Exception): getenv() def test_collection_env(path_coutwildrnp_shp): """We have a GDAL env within collection context""" with fiona.open(path_coutwildrnp_shp): assert 'FIONA_ENV' in getenv() @pytest.mark.parametrize('driver,filename', 
def test_encoding_option_warning(tmpdir, caplog):
    """There is no ENCODING creation option log warning for GeoJSON"""
    fiona.Collection(str(tmpdir.join("test.geojson")), "w", driver="GeoJSON",
                     crs="epsg:4326",
                     schema={"geometry": "Point",
                             "properties": {"foo": "int"}})
    assert not caplog.text


def test_closed_session_next(path_coutwildrnp_shp):
    """Confirm fix for issue #687"""
    src = fiona.open(path_coutwildrnp_shp)
    itr = iter(src)
    list(itr)
    src.close()
    with pytest.raises(FionaValueError):
        next(itr)


def test_collection_no_env(path_coutwildrnp_shp):
    """We have no GDAL env left over from open"""
    collection = fiona.open(path_coutwildrnp_shp)
    assert collection
    with pytest.raises(Exception):
        getenv()


def test_collection_env(path_coutwildrnp_shp):
    """We have a GDAL env within collection context"""
    with fiona.open(path_coutwildrnp_shp):
        assert 'FIONA_ENV' in getenv()


@pytest.mark.parametrize('driver,filename', [('ESRI Shapefile', 'test.shp'),
                                             ('GeoJSON', 'test.json'),
                                             ('GPKG', 'test.gpkg')])
def test_mask_polygon_triangle(tmpdir, driver, filename):
    """Test if mask works for non-trivial geometries"""
    schema = {'geometry': 'Polygon',
              'properties': OrderedDict([('position_i', 'int'),
                                         ('position_j', 'int')])}
    records = [{'geometry': {'type': 'Polygon',
                             'coordinates': (((float(i), float(j)),
                                              (float(i + 1), float(j)),
                                              (float(i + 1), float(j + 1)),
                                              (float(i), float(j + 1)),
                                              (float(i), float(j))),)},
                'properties': {'position_i': i, 'position_j': j}}
               for i in range(10) for j in range(10)]
    random.shuffle(records)

    path = str(tmpdir.join(filename))

    with fiona.open(path, 'w',
                    driver=driver,
                    schema=schema,) as c:
        c.writerecords(records)

    with fiona.open(path) as c:
        items = list(
            c.items(mask={'type': 'Polygon',
                          'coordinates': (((2.0, 2.0), (4.0, 4.0),
                                           (4.0, 6.0), (2.0, 2.0)),)}))
        assert len(items) == 15


def test_collection__empty_column_name(tmpdir):
    """Based on pull #955"""
    tmpfile = str(tmpdir.join("test_empty.geojson"))
    with pytest.warns(UserWarning, match="Empty field name at index 0"):
        with fiona.open(tmpfile, "w", driver="GeoJSON", schema={
            "geometry": "Point",
            "properties": {"": "str", "name": "str"}
        }) as tmp:
            tmp.writerecords([{
                "geometry": {"type": "Point", "coordinates": [8, 49]},
                "properties": {"": "", "name": "test"}
            }])

    with fiona.open(tmpfile) as tmp:
        with pytest.warns(UserWarning, match="Empty field name at index 0"):
            assert tmp.schema == {
                "geometry": "Point",
                "properties": {"": "str", "name": "str"}
            }
        with pytest.warns(UserWarning, match="Empty field name at index 0"):
            next(tmp)


Fiona-1.8.21/tests/test_collection_crs.py

import os
import re

import pytest

import fiona
import fiona.crs
from fiona.errors import CRSError
from .conftest import WGS84PATTERN, requires_gdal2, requires_gdal3


def test_collection_crs_wkt(path_coutwildrnp_shp):
    with fiona.open(path_coutwildrnp_shp) as src:
        assert re.match(WGS84PATTERN, src.crs_wkt)


def test_collection_no_crs_wkt(tmpdir, path_coutwildrnp_shp):
    """crs members of a dataset with no crs can be accessed safely."""
    filename = str(tmpdir.join("test.shp"))
    with fiona.open(path_coutwildrnp_shp) as src:
        profile = src.meta
    del profile['crs']
    del profile['crs_wkt']
    with fiona.open(filename, 'w', **profile) as dst:
        assert dst.crs_wkt == ""
        assert dst.crs == {}


@requires_gdal2
def test_collection_create_crs_wkt(tmpdir):
    """A collection can be created using crs_wkt"""
    filename = str(tmpdir.join("test.geojson"))
    wkt = 'GEOGCS["GCS_WGS_1984",DATUM["WGS_1984",SPHEROID["WGS_84",6378137,298.257223563]],PRIMEM["Greenwich",0],UNIT["Degree",0.017453292519943295],AUTHORITY["EPSG","4326"]]'
    with fiona.open(filename, 'w',
                    schema={'geometry': 'Point',
                            'properties': {'foo': 'int'}},
                    crs_wkt=wkt, driver='GeoJSON') as dst:
        assert dst.crs_wkt.startswith('GEOGCS["WGS 84') or dst.crs_wkt.startswith('GEOGCS["GCS_WGS_1984')

    with fiona.open(filename) as col:
        assert col.crs_wkt.startswith('GEOGCS["WGS 84') or col.crs_wkt.startswith('GEOGCS["GCS_WGS_1984')


@requires_gdal3
def test_collection_urn_crs(tmpdir):
    filename = str(tmpdir.join("test.geojson"))
    crs = "urn:ogc:def:crs:OGC:1.3:CRS84"
    with fiona.open(filename, 'w',
                    schema={'geometry': 'Point',
                            'properties': {'foo': 'int'}},
                    crs=crs, driver='GeoJSON') as dst:
        assert dst.crs_wkt.startswith('GEOGCS["WGS 84')

    with fiona.open(filename) as col:
        assert col.crs_wkt.startswith('GEOGCS["WGS 84')
def test_collection_invalid_crs(tmpdir):
    filename = str(tmpdir.join("test.geojson"))
    with pytest.raises(CRSError):
        with fiona.open(filename, 'w',
                        schema={'geometry': 'Point',
                                'properties': {'foo': 'int'}},
                        crs="12ab-invalid", driver='GeoJSON') as dst:
            pass


def test_collection_invalid_crs_wkt(tmpdir):
    filename = str(tmpdir.join("test.geojson"))
    with pytest.raises(CRSError):
        with fiona.open(filename, 'w',
                        schema={'geometry': 'Point',
                                'properties': {'foo': 'int'}},
                        crs_wkt="12ab-invalid", driver='GeoJSON') as dst:
            pass
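
# ---------------------------------------------------------------------------
# Editor's note: illustrative sketch, not part of the original test suite.
# A collection's CRS can be given as a PROJ4-style dict, an "epsg:xxxx"
# string, or WKT via ``crs_wkt``; fiona.crs.from_epsg builds the dict form.
# A minimal example under those assumptions; the path is a placeholder.
def _sketch_crs_variants(path="crs_example.geojson"):
    from fiona.crs import from_epsg
    schema = {'geometry': 'Point', 'properties': {'foo': 'int'}}
    with fiona.open(path, 'w', driver='GeoJSON', schema=schema,
                    crs=from_epsg(4326)) as dst:
        # from_epsg(4326) == {'init': 'epsg:4326', 'no_defs': True}
        return dst.crs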
Fiona-1.8.21/tests/test_collection_legacy.py

# Testing collections and workspaces

import unittest
import re

import pytest

import fiona
from .conftest import WGS84PATTERN


@pytest.mark.usefixtures("unittest_path_coutwildrnp_shp")
class ReadingTest(unittest.TestCase):

    def setUp(self):
        self.c = fiona.open(self.path_coutwildrnp_shp, "r")

    def tearDown(self):
        self.c.close()

    def test_open_repr(self):
        assert (
            repr(self.c) ==
            ("<open Collection '{path}:coutwildrnp', mode 'r' "
             "at {hexid}>".format(hexid=hex(id(self.c)),
                                  path=self.path_coutwildrnp_shp)))

    def test_closed_repr(self):
        self.c.close()
        assert (
            repr(self.c) ==
            ("<closed Collection '{path}:coutwildrnp', mode 'r' "
             "at {hexid}>".format(hexid=hex(id(self.c)),
                                  path=self.path_coutwildrnp_shp)))

    def test_path(self):
        assert self.c.path == self.path_coutwildrnp_shp

    def test_name(self):
        assert self.c.name == 'coutwildrnp'

    def test_mode(self):
        assert self.c.mode == 'r'

    def test_encoding(self):
        assert self.c.encoding is None

    def test_iter(self):
        assert iter(self.c)

    def test_closed_no_iter(self):
        self.c.close()
        with pytest.raises(ValueError):
            iter(self.c)

    def test_len(self):
        assert len(self.c) == 67

    def test_closed_len(self):
        # Len is lazy, it's never computed in this case. TODO?
        self.c.close()
        assert len(self.c) == 0

    def test_len_closed_len(self):
        # Lazy len is computed in this case and sticks.
        len(self.c)
        self.c.close()
        assert len(self.c) == 67

    def test_driver(self):
        assert self.c.driver == "ESRI Shapefile"

    def test_closed_driver(self):
        self.c.close()
        assert self.c.driver is None

    def test_driver_closed_driver(self):
        self.c.driver
        self.c.close()
        assert self.c.driver == "ESRI Shapefile"

    def test_schema(self):
        s = self.c.schema['properties']
        assert s['PERIMETER'] == "float:24.15"
        assert s['NAME'] == "str:80"
        assert s['URL'] == "str:101"
        assert s['STATE_FIPS'] == "str:80"
        assert s['WILDRNP020'] == "int:10"

    def test_closed_schema(self):
        # Schema is lazy too, never computed in this case. TODO?
        self.c.close()
        assert self.c.schema is None

    def test_schema_closed_schema(self):
        self.c.schema
        self.c.close()
        assert sorted(self.c.schema.keys()) == ['geometry', 'properties']

    def test_crs(self):
        crs = self.c.crs
        assert crs['init'] == 'epsg:4326'

    def test_crs_wkt(self):
        crs = self.c.crs_wkt
        assert re.match(WGS84PATTERN, crs)

    def test_closed_crs(self):
        # Crs is lazy too, never computed in this case. TODO?
        self.c.close()
        assert self.c.crs is None

    def test_crs_closed_crs(self):
        self.c.crs
        self.c.close()
        assert sorted(self.c.crs.keys()) == ['init']

    def test_meta(self):
        assert (sorted(self.c.meta.keys()) ==
                ['crs', 'crs_wkt', 'driver', 'schema'])

    def test_profile(self):
        assert (sorted(self.c.profile.keys()) ==
                ['crs', 'crs_wkt', 'driver', 'schema'])

    def test_bounds(self):
        assert self.c.bounds[0] == pytest.approx(-113.564247)
        assert self.c.bounds[1] == pytest.approx(37.068981)
        assert self.c.bounds[2] == pytest.approx(-104.970871)
        assert self.c.bounds[3] == pytest.approx(41.996277)

    def test_context(self):
        with fiona.open(self.path_coutwildrnp_shp, "r") as c:
            assert c.name == 'coutwildrnp'
            assert len(c) == 67
        assert c.closed

    def test_iter_one(self):
        itr = iter(self.c)
        f = next(itr)
        assert f['id'] == "0"
        assert f['properties']['STATE'] == 'UT'

    def test_iter_list(self):
        f = list(self.c)[0]
        assert f['id'] == "0"
        assert f['properties']['STATE'] == 'UT'

    def test_re_iter_list(self):
        f = list(self.c)[0]  # Run through iterator
        f = list(self.c)[0]  # Run through a new, reset iterator
        assert f['id'] == "0"
        assert f['properties']['STATE'] == 'UT'

    def test_getitem_one(self):
        f = self.c[0]
        assert f['id'] == "0"
        assert f['properties']['STATE'] == 'UT'

    def test_getitem_iter_combo(self):
        i = iter(self.c)
        f = next(i)
        f = next(i)
        assert f['id'] == "1"
        f = self.c[0]
        assert f['id'] == "0"
        f = next(i)
        assert f['id'] == "2"

    def test_no_write(self):
        with pytest.raises(IOError):
            self.c.write({})

    def test_iter_items_list(self):
        i, f = list(self.c.items())[0]
        assert i == 0
        assert f['id'] == "0"
        assert f['properties']['STATE'] == 'UT'

    def test_iter_keys_list(self):
        i = list(self.c.keys())[0]
        assert i == 0

    def test_in_keys(self):
        assert 0 in self.c.keys()
        assert 0 in self.c


Fiona-1.8.21/tests/test_compound_crs.py

"""Test of compound CRS crash avoidance"""

import fiona


def test_compound_crs(data):
    """Don't crash"""
    prj = data.join("coutwildrnp.prj")
    prj.write("""COMPD_CS["unknown",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],TOWGS84[0,0,0,0,0,0,0],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433],AUTHORITY["EPSG","4326"]],VERT_CS["unknown",VERT_DATUM["unknown",2005],UNIT["metre",1.0,AUTHORITY["EPSG","9001"]],AXIS["Up",UP]]]""")
    with fiona.open(str(data.join("coutwildrnp.shp"))) as collection:
        assert isinstance(collection.crs, dict)


Fiona-1.8.21/tests/test_crs.py

import pytest

from fiona import crs, _crs
from fiona.errors import CRSError

from .conftest import requires_gdal_lt_3


def test_proj_keys():
    assert len(crs.all_proj_keys) == 87
    assert 'init' in crs.all_proj_keys
    assert 'proj' in crs.all_proj_keys
    assert 'no_mayo' in crs.all_proj_keys


def test_from_string():
    # A PROJ.4 string with extra whitespace.
    val = crs.from_string(
        " +proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs +foo  ")
    assert len(val.items()) == 4
    assert val['proj'] == 'longlat'
    assert val['ellps'] == 'WGS84'
    assert val['datum'] == 'WGS84'
    assert val['no_defs']
    assert 'foo' not in val
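
# ---------------------------------------------------------------------------
# Editor's note: illustrative sketch, not part of the original test suite.
# from_string() parses a PROJ.4 string into a mapping and drops unknown
# parameters; to_string() serializes a mapping back with keys sorted, so a
# round trip normalizes whitespace and ordering:
def _sketch_proj4_round_trip():
    val = crs.from_string("+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs")
    # Returns "+datum=WGS84 +ellps=WGS84 +no_defs +proj=longlat"
    return crs.to_string(val)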
def test_from_string_utm():
    # A PROJ.4 string with extra whitespace and integer UTM zone.
    val = crs.from_string(
        " +proj=utm +zone=13 +ellps=WGS84 +foo  ")
    assert len(val.items()) == 3
    assert val['proj'] == 'utm'
    assert val['ellps'] == 'WGS84'
    assert val['zone'] == 13
    assert 'foo' not in val


def test_to_string():
    # Make a string from a mapping with a few bogus items
    val = {
        'proj': 'longlat', 'ellps': 'WGS84', 'datum': 'WGS84',
        'no_defs': True, 'foo': True, 'axis': False, 'belgium': [1, 2]}
    assert crs.to_string(
        val) == "+datum=WGS84 +ellps=WGS84 +no_defs +proj=longlat"


def test_to_string_utm():
    # Make a string from a mapping with a few bogus items
    val = {
        'proj': 'utm', 'ellps': 'WGS84', 'zone': 13,
        'no_defs': True, 'foo': True, 'axis': False, 'belgium': [1, 2]}
    assert crs.to_string(
        val) == "+ellps=WGS84 +no_defs +proj=utm +zone=13"


def test_to_string_epsg():
    val = {'init': 'epsg:4326', 'no_defs': True}
    assert crs.to_string(val) == "+init=epsg:4326 +no_defs"


def test_to_string_zeroval():
    # Make a string with some 0 values (e.g. esri:102017)
    val = {'proj': 'laea', 'lat_0': 90, 'lon_0': 0, 'x_0': 0, 'y_0': 0,
           'ellps': 'WGS84', 'datum': 'WGS84', 'units': 'm', 'no_defs': True}
    assert crs.to_string(val) == (
        "+datum=WGS84 +ellps=WGS84 +lat_0=90 +lon_0=0 +no_defs +proj=laea "
        "+units=m +x_0=0 +y_0=0")


def test_from_epsg():
    val = crs.from_epsg(4326)
    assert val['init'] == "epsg:4326"
    assert val['no_defs']


def test_from_epsg_neg():
    try:
        crs.from_epsg(-1)
    except ValueError:
        pass
    except:
        raise


def test_to_string_unicode():
    # See issue #83.
    val = crs.to_string({
        u'units': u'm',
        u'no_defs': True,
        u'datum': u'NAD83',
        u'proj': u'utm',
        u'zone': 16})
    assert 'NAD83' in val


@requires_gdal_lt_3
def test_wktext():
    """Test +wktext parameter is preserved."""
    proj4 = ('+proj=merc +a=6378137 +b=6378137 +lat_ts=0.0 +lon_0=0.0 '
             '+x_0=0.0 +y_0=0 +k=1.0 +units=m +nadgrids=@null +wktext '
             '+no_defs')
    assert 'wktext' in crs.from_string(proj4)


def test_towgs84():
    """+towgs84 is preserved"""
    proj4 = ('+proj=lcc +lat_1=49 +lat_2=46 +lat_0=47.5 '
             '+lon_0=13.33333333333333 +x_0=400000 +y_0=400000 +ellps=bessel '
             '+towgs84=577.326,90.129,463.919,5.137,1.474,5.297,2.4232 '
             '+units=m +wktext +no_defs')
    assert 'towgs84' in crs.from_string(proj4)


@requires_gdal_lt_3
def test_towgs84_wkt():
    """+towgs84 +wktext are preserved in WKT"""
    proj4 = ('+proj=lcc +lat_1=49 +lat_2=46 +lat_0=47.5 '
             '+lon_0=13.33333333333333 +x_0=400000 +y_0=400000 +ellps=bessel '
             '+towgs84=577.326,90.129,463.919,5.137,1.474,5.297,2.4232 '
             '+units=m +wktext +no_defs')
    wkt = _crs.crs_to_wkt(proj4)
    assert 'towgs84' in wkt
    assert 'wktext' in _crs.crs_to_wkt(proj4)


@pytest.mark.parametrize("invalid_input", [
    "a random string that is invalid",
    ("a", "tuple"),
    "-48567=409 =2095"
])
def test_invalid_crs(invalid_input):
    with pytest.raises(CRSError):
        _crs.crs_to_wkt(invalid_input)
Fiona-1.8.21/tests/test_cursor_interruptions.py

import fiona
import pytest
from fiona.drvsupport import driver_mode_mingdal, _driver_supports_mode
from fiona.errors import DriverError
from tests.conftest import get_temp_filename


@pytest.mark.parametrize('driver', [driver for driver in driver_mode_mingdal['w'].keys()
                                    if _driver_supports_mode(driver, 'w')])
def test_write_getextent(tmpdir, driver, testdata_generator):
    """Test if a call to OGR_L_GetExtent has side effects for writing"""
    schema, crs, records1, records2, test_equal, create_kwargs = testdata_generator(driver, range(0, 10),
                                                                                    range(10, 20))
    path = str(tmpdir.join(get_temp_filename(driver)))
    positions = set([int(r['properties']['position']) for r in records1 + records2])

    with fiona.open(path, 'w', driver=driver, crs=crs, schema=schema, **create_kwargs) as c:
        c.writerecords(records1)

        # Call to OGR_L_GetExtent
        try:
            c.bounds
        except DriverError:
            pass

        c.writerecords(records2)

    with fiona.open(path) as c:
        data = set([int(f['properties']['position']) for f in c])
        assert len(positions) == len(data)
        for p in positions:
            assert p in data


@pytest.mark.parametrize('driver', [driver for driver in driver_mode_mingdal['w'].keys()
                                    if _driver_supports_mode(driver, 'w')])
def test_read_getextent(tmpdir, driver, testdata_generator):
    """Test if a call to OGR_L_GetExtent has side effects for reading"""
    schema, crs, records1, records2, test_equal, create_kwargs = testdata_generator(driver, range(0, 10),
                                                                                    range(10, 20))
    path = str(tmpdir.join(get_temp_filename(driver)))
    positions = set([int(r['properties']['position']) for r in records1 + records2])

    with fiona.open(path, 'w', driver=driver, crs=crs, schema=schema, **create_kwargs) as c:
        c.writerecords(records1)
        c.writerecords(records2)

    with fiona.open(path) as c:
        data = set()
        for _ in range(len(records1)):
            f = next(c)
            data.add(int(f['properties']['position']))

        # Call to OGR_L_GetExtent
        try:
            c.bounds
        except DriverError:
            pass

        for _ in range(len(records1)):
            f = next(c)
            data.add(int(f['properties']['position']))

        assert len(positions) == len(data)
        for p in positions:
            assert p in data


@pytest.mark.parametrize('driver', [driver for driver in driver_mode_mingdal['w'].keys()
                                    if _driver_supports_mode(driver, 'w')])
def test_write_getfeaturecount(tmpdir, driver, testdata_generator):
    """Test if a call to OGR_L_GetFeatureCount has side effects for writing"""
    schema, crs, records1, records2, test_equal, create_kwargs = testdata_generator(driver, range(0, 10),
                                                                                    range(10, 20))
    path = str(tmpdir.join(get_temp_filename(driver)))
    positions = set([int(r['properties']['position']) for r in records1 + records2])

    with fiona.open(path, 'w', driver=driver, crs=crs, schema=schema, **create_kwargs) as c:
        c.writerecords(records1)

        # Call to OGR_L_GetFeatureCount
        try:
            assert len(c) == len(records1)
        except TypeError:
            pass

        c.writerecords(records2)

    with fiona.open(path) as c:
        data = set([int(f['properties']['position']) for f in c])
        assert len(positions) == len(data)
        for p in positions:
            assert p in data


@pytest.mark.parametrize('driver', [driver for driver in driver_mode_mingdal['w'].keys()
                                    if _driver_supports_mode(driver, 'w')])
def test_read_getfeaturecount(tmpdir, driver, testdata_generator):
    """Test if a call to OGR_L_GetFeatureCount has side effects for reading"""
    schema, crs, records1, records2, test_equal, create_kwargs = testdata_generator(driver, range(0, 10),
                                                                                    range(10, 20))
    path = str(tmpdir.join(get_temp_filename(driver)))
    positions = set([int(r['properties']['position']) for r in records1 + records2])

    with fiona.open(path, 'w', driver=driver, crs=crs, schema=schema, **create_kwargs) as c:
        c.writerecords(records1)
        c.writerecords(records2)

    with fiona.open(path) as c:
        data = set()
        for _ in range(len(records1)):
            f = next(c)
            data.add(int(f['properties']['position']))

        # Call to OGR_L_GetFeatureCount
        try:
            assert len(data) == len(records1)
        except TypeError:
            pass

        for _ in range(len(records1)):
            f = next(c)
            data.add(int(f['properties']['position']))

        try:
            assert len(data) == len(records1 + records2)
        except TypeError:
            pass

        assert len(positions) == len(data)
        for p in positions:
            assert p in data
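
# ---------------------------------------------------------------------------
# Editor's note: illustrative sketch, not part of the original test suite.
# The pattern these tests guard: ``len(collection)`` calls
# OGR_L_GetFeatureCount and ``collection.bounds`` calls OGR_L_GetExtent, and
# neither call may disturb an in-progress read cursor or pending writes.
# Condensed shape of the read case; the path is a placeholder.
def _sketch_interrupted_read(path="coutwildrnp.shp"):
    with fiona.open(path) as src:
        it = iter(src)
        first = next(it)   # begin reading the layer
        _ = src.bounds     # triggers OGR_L_GetExtent
        _ = len(src)       # triggers OGR_L_GetFeatureCount
        second = next(it)  # the read cursor must not have been reset
        return first, second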
Fiona-1.8.21/tests/test_curve_geometries.py

"""Tests of features related to GDAL RFC 49

See https://trac.osgeo.org/gdal/wiki/rfc49_curve_geometries.

"""

import fiona

from .conftest import requires_gdal2


@requires_gdal2
def test_line_curve_conversion(path_curves_line_csv):
    """Convert curved geometries to linear approximations"""
    with fiona.open(path_curves_line_csv) as col:
        assert col.schema['geometry'] == 'Unknown'
        features = list(col)
        assert len(features) == 9


Fiona-1.8.21/tests/test_data_paths.py

"""Tests of GDAL and PROJ data finding"""

import os.path

from click.testing import CliRunner
import pytest

import fiona
from fiona._env import GDALDataFinder, PROJDataFinder
from fiona.fio.main import main_group


@pytest.mark.wheel
def test_gdal_data_wheel():
    """Get GDAL data path from a wheel"""
    assert GDALDataFinder().search() == os.path.join(
        os.path.dirname(fiona.__file__), 'gdal_data')


@pytest.mark.wheel
def test_proj_data_wheel():
    """Get PROJ data path from a wheel"""
    assert PROJDataFinder().search() == os.path.join(
        os.path.dirname(fiona.__file__), 'proj_data')


@pytest.mark.wheel
def test_env_gdal_data_wheel():
    runner = CliRunner()
    result = runner.invoke(main_group, ['env', '--gdal-data'])
    assert result.exit_code == 0
    assert result.output.strip() == os.path.join(
        os.path.dirname(fiona.__file__), 'gdal_data')


@pytest.mark.wheel
def test_env_proj_data_wheel():
    runner = CliRunner()
    result = runner.invoke(main_group, ['env', '--proj-data'])
    assert result.exit_code == 0
    assert result.output.strip() == os.path.join(
        os.path.dirname(fiona.__file__), 'proj_data')


def test_env_gdal_data_environ(monkeypatch):
    monkeypatch.setenv('GDAL_DATA', '/foo/bar')
    runner = CliRunner()
    result = runner.invoke(main_group, ['env', '--gdal-data'])
    assert result.exit_code == 0
    assert result.output.strip() == '/foo/bar'


def test_env_proj_data_environ(monkeypatch):
    monkeypatch.setenv('PROJ_LIB', '/foo/bar')
    runner = CliRunner()
    result = runner.invoke(main_group, ['env', '--proj-data'])
    assert result.exit_code == 0
    assert result.output.strip() == '/foo/bar'
""" from collections import OrderedDict import fiona from fiona._env import get_gdal_version_num, calc_gdal_version_num import pytest from fiona.errors import DriverSupportError from fiona.rfc3339 import parse_time, parse_datetime from .conftest import get_temp_filename from fiona.env import GDALVersion import datetime from fiona.drvsupport import (supported_drivers, driver_mode_mingdal, _driver_converts_field_type_silently_to_str, _driver_supports_field, _driver_converts_to_str, _driver_supports_timezones, _driver_supports_milliseconds, _driver_supports_mode) import pytz from pytz import timezone gdal_version = GDALVersion.runtime() def get_schema(driver, field_type): if driver == 'GPX': return {'properties': OrderedDict([('ele', 'float'), ('time', field_type)]), 'geometry': 'Point'} if driver == 'GPSTrackMaker': return { 'properties': OrderedDict([('name', 'str'), ('comment', 'str'), ('icon', 'int'), ('time', field_type)]), 'geometry': 'Point'} if driver == 'CSV': return {"properties": {"datefield": field_type}} return {"geometry": "Point", "properties": {"datefield": field_type}} def get_records(driver, values): if driver == 'GPX': return [{"geometry": {"type": "Point", "coordinates": [1, 2]}, "properties": {'ele': 0, "time": val}} for val in values] if driver == 'GPSTrackMaker': return [{"geometry": {"type": "Point", "coordinates": [1, 2]}, "properties": OrderedDict([('name', ''), ('comment', ''), ('icon', 48), ('time', val)])} for val in values] if driver == 'CSV': return [{"properties": {"datefield": val}} for val in values] return [{"geometry": {"type": "Point", "coordinates": [1, 2]}, "properties": {"datefield": val}} for val in values] def get_schema_field(driver, schema): if driver in {'GPX', 'GPSTrackMaker'}: return schema["properties"]["time"] return schema["properties"]["datefield"] def get_field(driver, f): if driver in {'GPX', 'GPSTrackMaker'}: return f["properties"]["time"] return f['properties']['datefield'] class TZ(datetime.tzinfo): def __init__(self, minutes): self.minutes = minutes def utcoffset(self, dt): return datetime.timedelta(minutes=self.minutes) def generate_testdata(field_type, driver): """ Generate test cases for test_datefield Each test case has the format [(in_value1, true_value as datetime.*object), (in_value2, true_value as datetime.*object), ...] 
""" # Test data for 'date' data type if field_type == 'date': return [("2018-03-25", datetime.date(2018, 3, 25)), (datetime.date(2018, 3, 25), datetime.date(2018, 3, 25))] # Test data for 'datetime' data type if field_type == 'datetime': return [("2018-03-25T22:49:05", datetime.datetime(2018, 3, 25, 22, 49, 5)), (datetime.datetime(2018, 3, 25, 22, 49, 5), datetime.datetime(2018, 3, 25, 22, 49, 5)), ("2018-03-25T22:49:05.23", datetime.datetime(2018, 3, 25, 22, 49, 5, 230000)), (datetime.datetime(2018, 3, 25, 22, 49, 5, 230000), datetime.datetime(2018, 3, 25, 22, 49, 5, 230000)), ("2018-03-25T22:49:05.123456", datetime.datetime(2018, 3, 25, 22, 49, 5, 123000)), (datetime.datetime(2018, 3, 25, 22, 49, 5, 123456), datetime.datetime(2018, 3, 25, 22, 49, 5, 123000)), ("2018-03-25T22:49:05+01:30", datetime.datetime(2018, 3, 25, 22, 49, 5, tzinfo=TZ(90))), ("2018-03-25T22:49:05-01:30", datetime.datetime(2018, 3, 25, 22, 49, 5, tzinfo=TZ(-90))), (datetime.datetime(2018, 3, 25, 22, 49, 5, tzinfo=TZ(90)), datetime.datetime(2018, 3, 25, 22, 49, 5, tzinfo=TZ(90))), (datetime.datetime(2018, 3, 25, 22, 49, 5, tzinfo=TZ(-90)), datetime.datetime(2018, 3, 25, 22, 49, 5, tzinfo=TZ(-90))), (datetime.datetime(2020, 1, 21, 12, 0, 0, tzinfo=pytz.utc).astimezone(timezone('Europe/Zurich')), datetime.datetime(2020, 1, 21, 12, 0, 0, tzinfo=pytz.utc).astimezone(timezone('Europe/Zurich'))), (datetime.datetime(2020, 1, 21, 12, 0, 0, tzinfo=pytz.utc).astimezone(timezone('US/Mountain')), datetime.datetime(2020, 1, 21, 12, 0, 0, tzinfo=pytz.utc).astimezone(timezone('US/Mountain'))), (datetime.datetime(2018, 3, 25, 22, 49, 5, tzinfo=TZ(60 * 24 - 15)), datetime.datetime(2018, 3, 25, 22, 49, 5, tzinfo=TZ(60 * 24 - 15))), (datetime.datetime(2018, 3, 25, 22, 49, 5, tzinfo=TZ(-60 * 24 + 15)), datetime.datetime(2018, 3, 25, 22, 49, 5, tzinfo=TZ(-60 * 24 + 15))), ("2018-03-25T22:49:05-23:45", datetime.datetime(2018, 3, 25, 22, 49, 5, tzinfo=TZ(-60 * 24 + 15))), ("2018-03-25T22:49:05+23:45", datetime.datetime(2018, 3, 25, 22, 49, 5, tzinfo=TZ(60 * 24 - 15)))] # Test data for 'time' data type elif field_type == 'time': return [("22:49:05", datetime.time(22, 49, 5)), (datetime.time(22, 49, 5), datetime.time(22, 49, 5)), ("22:49:05.23", datetime.time(22, 49, 5, 230000)), (datetime.time(22, 49, 5, 230000), datetime.time(22, 49, 5, 230000)), ("22:49:05.123456", datetime.time(22, 49, 5, 123000)), (datetime.time(22, 49, 5, 123456), datetime.time(22, 49, 5, 123000)), ("22:49:05+01:30", datetime.time(22, 49, 5, tzinfo=TZ(90))), ("22:49:05-01:30", datetime.time(22, 49, 5, tzinfo=TZ(-90))), (datetime.time(22, 49, 5, tzinfo=TZ(90)), datetime.time(22, 49, 5, tzinfo=TZ(90))), (datetime.time(22, 49, 5, tzinfo=TZ(-90)), datetime.time(22, 49, 5, tzinfo=TZ(-90))), (datetime.time(22, 49, 5, tzinfo=TZ(60 * 24 - 15)), datetime.time(22, 49, 5, tzinfo=TZ(60 * 24 - 15))), (datetime.time(22, 49, 5, tzinfo=TZ(-60 * 24 + 15)), datetime.time(22, 49, 5, tzinfo=TZ(-60 * 24 + 15))), ("22:49:05-23:45", datetime.time(22, 49, 5, tzinfo=TZ(-60 * 24 + 15))), ("22:49:05+23:45", datetime.time(22, 49, 5, tzinfo=TZ(60 * 24 - 15)))] def compare_datetimes_utc(d1, d2): """ Test if two time objects are the same. 
def compare_datetimes_utc(d1, d2):
    """Test if two datetime objects are the same.

    Naive datetimes are assumed to be UTC."""
    if d1.tzinfo is None:
        d1 = d1.replace(tzinfo=TZ(0))
    if d2.tzinfo is None:
        d2 = d2.replace(tzinfo=TZ(0))
    return d1 == d2


def test_compare_datetimes_utc():
    """Test compare_datetimes_utc"""
    d1 = datetime.datetime(2020, 1, 21, 12, 30, 0, tzinfo=TZ(60))
    d2 = datetime.datetime(2020, 1, 21, 11, 30, 0, tzinfo=TZ(0))
    assert d1 == d2
    assert compare_datetimes_utc(d1, d2)

    d1 = datetime.datetime(2020, 1, 21, 12, 30, 0, tzinfo=TZ(-60))
    d2 = datetime.datetime(2020, 1, 21, 11, 30, 0, tzinfo=TZ(0))
    assert not d1 == d2
    assert not compare_datetimes_utc(d1, d2)

    d1 = datetime.datetime(2020, 1, 21, 13, 0, 0, tzinfo=TZ(60))
    d2 = datetime.datetime(2020, 1, 21, 5, 0, 0, tzinfo=TZ(-60 * 7))
    assert d1 == d2
    assert compare_datetimes_utc(d1, d2)

    d1 = datetime.datetime(2020, 1, 21, 12, 0, 0, tzinfo=pytz.utc).astimezone(timezone('Europe/Zurich'))
    d2 = datetime.datetime(2020, 1, 21, 12, 0, 0, tzinfo=pytz.utc)
    assert d1 == d2
    assert compare_datetimes_utc(d1, d2)

    d1 = datetime.datetime(2020, 1, 21, 12, 0, 0, tzinfo=pytz.utc).astimezone(timezone('Europe/Zurich'))
    d2 = datetime.datetime(2020, 1, 21, 12, 0, 0, tzinfo=pytz.utc).astimezone(timezone('US/Mountain'))
    assert d1 == d2
    assert compare_datetimes_utc(d1, d2)

    d1 = datetime.datetime(2020, 6, 21, 12, 0, 0, tzinfo=pytz.utc).astimezone(timezone('Europe/Zurich'))
    d2 = datetime.datetime(2020, 6, 21, 12, 0, 0, tzinfo=pytz.utc).astimezone(timezone('US/Mountain'))
    assert d1 == d2
    assert compare_datetimes_utc(d1, d2)


def convert_time_to_utc(d):
    """Convert a datetime.time object to UTC"""
    d = datetime.datetime(1900, 1, 1, d.hour, d.minute, d.second,
                          d.microsecond, d.tzinfo)
    d -= d.utcoffset()
    return d.time()


def compare_times_utc(d1, d2):
    """Test if two datetime.time objects with fixed timezones have the same
    UTC time"""
    if d1.tzinfo is not None:
        d1 = convert_time_to_utc(d1)
    if d2.tzinfo is not None:
        d2 = convert_time_to_utc(d2)
    return d1.replace(tzinfo=None) == d2.replace(tzinfo=None)


def test_compare_times_utc():
    """Test compare_times_utc"""
    d1 = datetime.time(12, 30, 0, tzinfo=TZ(60))
    d2 = datetime.time(11, 30, 0, tzinfo=TZ(0))
    assert compare_times_utc(d1, d2)

    d1 = datetime.time(12, 30, 0, tzinfo=TZ(-60))
    d2 = datetime.time(11, 30, 0, tzinfo=TZ(0))
    assert not compare_times_utc(d1, d2)

    d1 = datetime.time(13, 0, 0, tzinfo=TZ(60))
    d2 = datetime.time(5, 0, 0, tzinfo=TZ(-60 * 7))
    assert compare_times_utc(d1, d2)

    d1 = datetime.datetime(2020, 6, 21, 12, 0, 0, tzinfo=pytz.utc).astimezone(timezone('MET')).timetz()
    d2 = datetime.datetime(2020, 6, 21, 12, 0, 0, tzinfo=pytz.utc).astimezone(timezone('EST')).timetz()
    assert compare_times_utc(d1, d2)


def get_tz_offset(d):
    """Returns a timezone (sign, hours, minutes) tuple

    E.g.: for '2020-01-21T12:30:00+01:30' ('+', 1, 30) is returned
    """
    offset_minutes = d.utcoffset().total_seconds() / 60
    if offset_minutes < 0:
        sign = "-"
    else:
        sign = "+"
    hours = int(abs(offset_minutes) / 60)
    minutes = int(abs(offset_minutes) % 60)
    return sign, hours, minutes


def test_get_tz_offset():
    """Test get_tz_offset"""
    d = datetime.datetime(2020, 1, 21, 12, 30, 0, tzinfo=TZ(90))
    assert get_tz_offset(d) == ('+', 1, 30)

    d = datetime.datetime(2020, 1, 21, 12, 30, 0, tzinfo=TZ(-90))
    assert get_tz_offset(d) == ('-', 1, 30)

    d = datetime.datetime(2020, 1, 21, 12, 30, 0, tzinfo=TZ(60 * 24 - 15))
    assert get_tz_offset(d) == ('+', 23, 45)

    d = datetime.datetime(2020, 1, 21, 12, 30, 0, tzinfo=TZ(-60 * 24 + 15))
    assert get_tz_offset(d) == ('-', 23, 45)
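
# ---------------------------------------------------------------------------
# Editor's note: illustrative sketch, not part of the original test suite.
# fiona.rfc3339.parse_datetime, used by the assertions below, splits an
# RFC 3339-style string into (y, m, d, hh, mm, ss, ms, tz), where tz is an
# offset in minutes or None; the TZ helper above turns that back into a
# timezone-aware datetime, mirroring the pattern in test_datefield:
def _sketch_parse_datetime(text="2018-03-25T22:49:05.123+01:30"):
    y, m, d, hh, mm, ss, ms, tz = parse_datetime(text)
    tzinfo = TZ(tz) if tz is not None else None
    return datetime.datetime(y, m, d, hh, mm, ss, ms, tzinfo)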
def generate_testcases():
    """Generate test cases for drivers that support datefields, convert
    datefields to string, or do not support datefields"""
    _test_cases_datefield = []
    _test_cases_datefield_to_str = []
    _test_cases_datefield_not_supported = []
    for field_type in ['time', 'datetime', 'date']:
        # Select only drivers that are capable of writing fields
        for driver, raw in supported_drivers.items():
            if _driver_supports_mode(driver, 'w'):
                if _driver_supports_field(driver, field_type):
                    if _driver_converts_field_type_silently_to_str(driver, field_type):
                        _test_cases_datefield_to_str.append((driver, field_type))
                    else:
                        _test_cases_datefield.append((driver, field_type))
                else:
                    _test_cases_datefield_not_supported.append((driver, field_type))

    return (_test_cases_datefield, _test_cases_datefield_to_str,
            _test_cases_datefield_not_supported)


test_cases_datefield, test_cases_datefield_to_str, test_cases_datefield_not_supported = generate_testcases()


@pytest.mark.parametrize("driver, field_type", test_cases_datefield)
def test_datefield(tmpdir, driver, field_type):
    """Test date, time, datetime field types."""

    def _validate(val, val_exp, field_type, driver):
        if field_type == 'date':
            return val == val_exp.isoformat()
        elif field_type == 'datetime':
            # Some drivers do not support timezones. In this case, Fiona
            # converts datetime fields with a timezone other than UTC to
            # UTC. Thus, both the datetime read by Fiona and the expected
            # value are first converted to UTC before being compared.

            # Milliseconds
            if _driver_supports_milliseconds(driver):
                y, m, d, hh, mm, ss, ms, tz = parse_datetime(val)
                if tz is not None:
                    tz = TZ(tz)
                val_d = datetime.datetime(y, m, d, hh, mm, ss, ms, tz)
                return compare_datetimes_utc(val_d, val_exp)
            else:
                # No Milliseconds
                y, m, d, hh, mm, ss, ms, tz = parse_datetime(val)
                if tz is not None:
                    tz = TZ(tz)
                val_d = datetime.datetime(y, m, d, hh, mm, ss, ms, tz)
                return compare_datetimes_utc(val_d, val_exp.replace(microsecond=0))
        elif field_type == 'time':
            # Some drivers do not support timezones. In this case, Fiona
            # converts time fields with a timezone other than UTC to UTC.
            # Thus, both the time read by Fiona and the expected value are
            # first converted to UTC before being compared.

            # Milliseconds
            if _driver_supports_milliseconds(driver):
                y, m, d, hh, mm, ss, ms, tz = parse_time(val)
                if tz is not None:
                    tz = TZ(tz)
                val_d = datetime.time(hh, mm, ss, ms, tz)
                return compare_times_utc(val_d, val_exp)
            else:
                # No Milliseconds
                y, m, d, hh, mm, ss, ms, tz = parse_time(val)
                if tz is not None:
                    tz = TZ(tz)
                val_d = datetime.time(hh, mm, ss, ms, tz)
                return compare_times_utc(val_d, val_exp.replace(microsecond=0))
        return False

    schema = get_schema(driver, field_type)
    path = str(tmpdir.join(get_temp_filename(driver)))
    values_in, values_exp = zip(*generate_testdata(field_type, driver))
    records = get_records(driver, values_in)

    with fiona.open(path, 'w', driver=driver, schema=schema) as c:
        c.writerecords(records)

    with fiona.open(path, 'r') as c:
        assert get_schema_field(driver, c.schema) == field_type
        items = [get_field(driver, f) for f in c]
        assert len(items) == len(values_in)
        for val, val_exp in zip(items, values_exp):
            assert _validate(val, val_exp, field_type, driver), \
                "{} does not match {}".format(val, val_exp.isoformat())
@pytest.mark.parametrize("driver, field_type", test_cases_datefield_to_str)
def test_datefield_driver_converts_to_string(tmpdir, driver, field_type):
    """Test handling of date, time, datetime for drivers that convert these
    types to string.

    As the formatting can be arbitrary, we only test whether the elements of
    a date / datetime / time object are included in the string, e.g. for the
    PCIDSK driver, whether hour 22 of the time 22:49:05 appears in
    '0000/00/00 22:49:05'.
    """

    def _validate(val, val_exp, field_type, driver):
        if field_type == 'date':
            if (str(val_exp.year) in val and
                    str(val_exp.month) in val and
                    str(val_exp.day) in val):
                return True
        elif field_type == 'datetime':
            if not _driver_supports_timezones(driver, field_type) and val_exp.utcoffset() is not None:
                val_exp = convert_time_to_utc(val_exp)

            # datetime fields can, depending on the driver, support:
            # - Timezones
            # - Milliseconds, respectively Microseconds

            # No timezone
            if val_exp.utcoffset() is None:
                # No Milliseconds
                if not _driver_supports_milliseconds(driver):
                    if (str(val_exp.year) in val and
                            str(val_exp.month) in val and
                            str(val_exp.day) in val and
                            str(val_exp.hour) in val and
                            str(val_exp.minute) in val and
                            str(val_exp.second) in val):
                        return True
                else:
                    # Microseconds
                    if (str(val_exp.year) in val and
                            str(val_exp.month) in val and
                            str(val_exp.day) in val and
                            str(val_exp.hour) in val and
                            str(val_exp.minute) in val and
                            str(val_exp.second) in val and
                            str(val_exp.microsecond) in val):
                        return True
                    # Milliseconds
                    elif (str(val_exp.year) in val and
                          str(val_exp.month) in val and
                          str(val_exp.day) in val and
                          str(val_exp.hour) in val and
                          str(val_exp.minute) in val and
                          str(val_exp.second) in val and
                          str(int(val_exp.microsecond / 1000)) in val):
                        return True
            # With timezone
            else:
                sign, hours, minutes = get_tz_offset(val_exp)
                if minutes > 0:
                    tz = "{sign}{hours:02d}{minutes:02d}".format(
                        sign=sign, hours=int(hours), minutes=int(minutes))
                else:
                    tz = "{sign}{hours:02d}".format(sign=sign, hours=int(hours))
                print("tz", tz)

                # No Milliseconds
                if not _driver_supports_milliseconds(driver):
                    if (str(val_exp.year) in val and
                            str(val_exp.month) in val and
                            str(val_exp.day) in val and
                            str(val_exp.hour) in val and
                            str(val_exp.minute) in val and
                            str(val_exp.second) in val and
                            tz in val):
                        return True
                else:
                    # Microseconds
                    if (str(val_exp.year) in val and
                            str(val_exp.month) in val and
                            str(val_exp.day) in val and
                            str(val_exp.hour) in val and
                            str(val_exp.minute) in val and
                            str(val_exp.second) in val and
                            str(val_exp.microsecond) in val and
                            tz in val):
                        return True
                    # Milliseconds
                    elif (str(val_exp.year) in val and
                          str(val_exp.month) in val and
                          str(val_exp.day) in val and
                          str(val_exp.hour) in val and
                          str(val_exp.minute) in val and
                          str(val_exp.second) in val and
                          str(int(val_exp.microsecond / 1000)) in val and
                          tz in val):
                        return True
        elif field_type == 'time':
            # time fields can, depending on the driver, support:
            # - Timezones
            # - Milliseconds, respectively Microseconds
            if not _driver_supports_timezones(driver, field_type) and val_exp.utcoffset() is not None:
                val_exp = convert_time_to_utc(val_exp)

            # No timezone
            if val_exp.utcoffset() is None:
                # No Milliseconds
                if not _driver_supports_milliseconds(driver):
                    if (str(val_exp.hour) in val and
                            str(val_exp.minute) in val and
                            str(val_exp.second) in val):
                        return True
                else:
                    # Microseconds
                    if (str(val_exp.hour) in val and
                            str(val_exp.minute) in val and
                            str(val_exp.second) in val and
                            str(val_exp.microsecond) in val):
                        return True
                    # Milliseconds
                    elif (str(val_exp.hour) in val and
                          str(val_exp.minute) in val and
                          str(val_exp.second) in val and
                          str(int(val_exp.microsecond / 1000)) in val):
                        return True
            # With timezone
            else:
                sign, hours, minutes = get_tz_offset(val_exp)
                if minutes > 0:
                    tz = "{sign}{hours:02d}{minutes:02d}".format(
                        sign=sign, hours=int(hours), minutes=int(minutes))
                else:
                    tz = "{sign}{hours:02d}".format(sign=sign, hours=int(hours))

                # No Milliseconds
                if not _driver_supports_milliseconds(driver):
                    if (str(val_exp.hour) in val and
                            str(val_exp.minute) in val and
                            str(val_exp.second) in val and
                            tz in val):
                        return True
                else:
                    # Microseconds
                    if (str(val_exp.hour) in val and
                            str(val_exp.minute) in val and
                            str(val_exp.second) in val and
                            str(val_exp.microsecond) in val and
                            tz in val):
                        return True
                    # Milliseconds
                    elif (str(val_exp.hour) in val and
                          str(val_exp.minute) in val and
                          str(val_exp.second) in val and
                          str(int(val_exp.microsecond / 1000)) in val and
                          tz in val):
                        return True
        return False

    schema = get_schema(driver, field_type)
    path = str(tmpdir.join(get_temp_filename(driver)))
    values_in, values_exp = zip(*generate_testdata(field_type, driver))
    records = get_records(driver, values_exp)

    with pytest.warns(UserWarning) as record:
        with fiona.open(path, 'w', driver=driver, schema=schema) as c:
            c.writerecords(records)
        assert len(record) == 1
        assert "silently converts" in record[0].message.args[0]

    with fiona.open(path, 'r') as c:
        assert get_schema_field(driver, c.schema) == 'str'
        items = [get_field(driver, f) for f in c]
        assert len(items) == len(values_in)
        for val, val_exp in zip(items, values_exp):
            assert _validate(val, val_exp, field_type, driver), \
                "{} does not match {}".format(val, val_exp.isoformat())


@pytest.mark.filterwarnings('ignore:.*driver silently converts *:UserWarning')
@pytest.mark.parametrize("driver,field_type",
                         test_cases_datefield + test_cases_datefield_to_str)
def test_datefield_null(tmpdir, driver, field_type):
    """Test handling of null values for date, time, datetime types for
    write capable drivers"""

    def _validate(val, val_exp, field_type, driver):
        if (driver == 'MapInfo File' and field_type == 'time' and
                calc_gdal_version_num(2, 0, 0) <= get_gdal_version_num() <
                calc_gdal_version_num(3, 1, 1)):
            return val == '00:00:00'
        if val is None or val == '':
            return True
        return False

    schema = get_schema(driver, field_type)
    path = str(tmpdir.join(get_temp_filename(driver)))
    values_in = [None]
    records = get_records(driver, values_in)

    with fiona.open(path, 'w', driver=driver, schema=schema) as c:
        c.writerecords(records)

    with fiona.open(path, 'r') as c:
        items = [get_field(driver, f) for f in c]
        assert len(items) == 1
        assert _validate(items[0], None, field_type, driver), \
            "{} does not match {}".format(items[0], None)


@pytest.mark.parametrize("driver, field_type", test_cases_datefield_not_supported)
def test_datetime_field_unsupported(tmpdir, driver, field_type):
    """Test if DriverSupportError is raised for unsupported field_types"""
    schema = get_schema(driver, field_type)
    path = str(tmpdir.join(get_temp_filename(driver)))
    values_in, values_out = zip(*generate_testdata(field_type, driver))
    records = get_records(driver, values_in)

    with pytest.raises(DriverSupportError):
        with fiona.open(path, 'w', driver=driver, schema=schema) as c:
            c.writerecords(records)


@pytest.mark.parametrize("driver, field_type", test_cases_datefield_not_supported)
def test_datetime_field_type_marked_not_supported_is_not_supported(tmpdir, driver, field_type, monkeypatch):
    """Test if a date/datetime/time field type marked as not supported is
    really not supported

    Warning: Success of this test does not necessarily mean that a field is
    not supported. E.g. errors can occur due to special schema requirements
    of drivers. This test only covers the standard case.
    """
    if driver == "BNA" and GDALVersion.runtime() < GDALVersion(2, 0):
        pytest.skip("BNA driver segfaults with gdal 1.11")

    monkeypatch.delitem(fiona.drvsupport._driver_field_type_unsupported[field_type], driver)

    schema = get_schema(driver, field_type)
    path = str(tmpdir.join(get_temp_filename(driver)))
    values_in, values_out = zip(*generate_testdata(field_type, driver))
    records = get_records(driver, values_in)

    is_good = True

    try:
        with fiona.open(path, 'w', driver=driver, schema=schema) as c:
            c.writerecords(records)

        with fiona.open(path, 'r') as c:
            if not get_schema_field(driver, c.schema) == field_type:
                is_good = False
            items = [get_field(driver, f) for f in c]
            for val_in, val_out in zip(items, values_out):
                if not val_in == val_out:
                    is_good = False
    except:
        is_good = False

    assert not is_good


def generate_tostr_testcases():
    """Flatten driver_converts_to_str to a list of (field_type, driver) tuples"""
    cases = []
    for field_type in _driver_converts_to_str:
        for driver in _driver_converts_to_str[field_type]:
            driver_supported = driver in supported_drivers
            driver_can_write = _driver_supports_mode(driver, 'w')
            field_supported = _driver_supports_field(driver, field_type)
            converts_to_str = _driver_converts_field_type_silently_to_str(driver, field_type)
            if driver_supported and driver_can_write and converts_to_str and field_supported:
                cases.append((field_type, driver))
    return cases
""" monkeypatch.delitem(fiona.drvsupport._driver_converts_to_str[field_type], driver) schema = get_schema(driver, field_type) path = str(tmpdir.join(get_temp_filename(driver))) values_in, values_out = zip(*generate_testdata(field_type, driver)) records = get_records(driver, values_in) with fiona.open(path, 'w', driver=driver, schema=schema) as c: c.writerecords(records) with fiona.open(path, 'r') as c: assert get_schema_field(driver, c.schema) == 'str' def test_read_timezone_geojson(path_test_tz_geojson): """Test if timezones are read correctly""" with fiona.open(path_test_tz_geojson) as c: items = list(c) assert items[0]['properties']['test'] == '2015-04-22T00:00:00+07:00' Fiona-1.8.21/tests/test_driver_options.py000066400000000000000000000017431420023252700204620ustar00rootroot00000000000000import os import tempfile from collections import OrderedDict import glob import fiona from tests.conftest import get_temp_filename, requires_gdal2 @requires_gdal2 def test_gml_format_option(): """ Test GML dataset creation option FORMAT (see https://github.com/Toblerity/Fiona/issues/968)""" schema = {'geometry': 'Point', 'properties': OrderedDict([('position', 'int')])} records = [{'geometry': {'type': 'Point', 'coordinates': (0.0, float(i))}, 'properties': {'position': i}} for i in range(10)] tmpdir = tempfile.mkdtemp() fpath = os.path.join(tmpdir, get_temp_filename('GML')) with fiona.open(fpath, 'w', driver="GML", schema=schema, FORMAT="GML3") as out: out.writerecords(records) xsd_path = glob.glob(os.path.join(tmpdir, "*.xsd"))[0] with open(xsd_path) as f: xsd = f.read() assert "http://schemas.opengis.net/gml/3.1.1" in xsd Fiona-1.8.21/tests/test_drivers.py000066400000000000000000000016311420023252700170660ustar00rootroot00000000000000"""Tests for Fiona's OGR driver interface.""" import logging import pytest import fiona from fiona.errors import FionaDeprecationWarning def test_options(tmpdir, path_coutwildrnp_shp): """Test that setting CPL_DEBUG=ON works and that a warning is raised.""" logfile = str(tmpdir.mkdir('tests').join('test_options.log')) logger = logging.getLogger() logger.setLevel(logging.DEBUG) fh = logging.FileHandler(logfile) fh.setLevel(logging.DEBUG) logger.addHandler(fh) # fiona.drivers() will be deprecated. with pytest.warns(FionaDeprecationWarning): with fiona.drivers(CPL_DEBUG=True): c = fiona.open(path_coutwildrnp_shp) c.close() with open(logfile, "r") as f: log = f.read() if fiona.gdal_version.major >= 2: assert "GDALOpen" in log else: assert "OGROpen" in log Fiona-1.8.21/tests/test_drvsupport.py000066400000000000000000000245041420023252700176440ustar00rootroot00000000000000"""Tests of driver support""" import logging import pytest from .conftest import requires_gdal24, get_temp_filename from fiona.drvsupport import supported_drivers, driver_mode_mingdal import fiona.drvsupport from fiona.env import GDALVersion from fiona._env import calc_gdal_version_num, get_gdal_version_num from fiona.errors import DriverError log = logging.getLogger() @requires_gdal24 @pytest.mark.parametrize("format", ["GeoJSON", "ESRIJSON", "TopoJSON", "GeoJSONSeq"]) def test_geojsonseq(format): """Format is available""" assert format in fiona.drvsupport.supported_drivers.keys() @pytest.mark.parametrize( "driver", [driver for driver, raw in supported_drivers.items() if "w" in raw] ) def test_write_or_driver_error(tmpdir, driver, testdata_generator): """ Test if write mode works. 
""" if driver == "BNA" and GDALVersion.runtime() < GDALVersion(2, 0): pytest.skip("BNA driver segfaults with gdal 1.11") schema, crs, records1, _, test_equal, create_kwargs = testdata_generator( driver, range(0, 10), [] ) path = str(tmpdir.join(get_temp_filename(driver))) if driver in driver_mode_mingdal[ "w" ] and get_gdal_version_num() < calc_gdal_version_num( *driver_mode_mingdal["w"][driver] ): # Test if DriverError is raised for gdal < driver_mode_mingdal with pytest.raises(DriverError): with fiona.open( path, "w", driver=driver, crs=crs, schema=schema, **create_kwargs ) as c: c.writerecords(records1) else: # Test if we can write with fiona.open( path, "w", driver=driver, crs=crs, schema=schema, **create_kwargs ) as c: c.writerecords(records1) if driver in {"FileGDB", "OpenFileGDB"}: open_driver = driver else: open_driver = None with fiona.open(path, driver=open_driver) as collection: assert collection.driver == driver assert len(list(collection)) == len(records1) @pytest.mark.parametrize( "driver", [driver for driver in driver_mode_mingdal["w"].keys()] ) def test_write_does_not_work_when_gdal_smaller_mingdal( tmpdir, driver, testdata_generator, monkeypatch ): """ Test if driver really can't write for gdal < driver_mode_mingdal If this test fails, it should be considered to update driver_mode_mingdal in drvsupport.py. """ if driver == "BNA" and GDALVersion.runtime() < GDALVersion(2, 0): pytest.skip("BNA driver segfaults with gdal 1.11") if driver == "FlatGeobuf" and calc_gdal_version_num( 3, 1, 0 ) <= get_gdal_version_num() < calc_gdal_version_num(3, 1, 3): pytest.skip("See https://github.com/Toblerity/Fiona/pull/924") schema, crs, records1, _, test_equal, create_kwargs = testdata_generator( driver, range(0, 10), [] ) path = str(tmpdir.join(get_temp_filename(driver))) if driver in driver_mode_mingdal[ "w" ] and get_gdal_version_num() < calc_gdal_version_num( *driver_mode_mingdal["w"][driver] ): monkeypatch.delitem(fiona.drvsupport.driver_mode_mingdal["w"], driver) with pytest.raises(Exception): with fiona.open( path, "w", driver=driver, crs=crs, schema=schema, **create_kwargs ) as c: c.writerecords(records1) @pytest.mark.parametrize( "driver", [driver for driver, raw in supported_drivers.items() if "a" in raw] ) def test_append_or_driver_error(tmpdir, testdata_generator, driver): """Test if driver supports append mode. Some driver only allow a specific schema. These drivers can be excluded by adding them to blacklist_append_drivers. 
""" if driver == "DGN": pytest.xfail("DGN schema has changed") if driver == "BNA" and GDALVersion.runtime() < GDALVersion(2, 0): pytest.skip("BNA driver segfaults with gdal 1.11") path = str(tmpdir.join(get_temp_filename(driver))) schema, crs, records1, records2, test_equal, create_kwargs = testdata_generator( driver, range(0, 5), range(5, 10) ) # If driver is not able to write, we cannot test append if driver in driver_mode_mingdal[ "w" ] and get_gdal_version_num() < calc_gdal_version_num( *driver_mode_mingdal["w"][driver] ): return # Create test file to append to with fiona.open( path, "w", driver=driver, crs=crs, schema=schema, **create_kwargs ) as c: c.writerecords(records1) if driver in driver_mode_mingdal[ "a" ] and get_gdal_version_num() < calc_gdal_version_num( *driver_mode_mingdal["a"][driver] ): # Test if DriverError is raised for gdal < driver_mode_mingdal with pytest.raises(DriverError): with fiona.open(path, "a", driver=driver) as c: c.writerecords(records2) else: # Test if we can append with fiona.open(path, "a", driver=driver) as c: c.writerecords(records2) if driver in {"FileGDB", "OpenFileGDB"}: open_driver = driver else: open_driver = None with fiona.open(path, driver=open_driver) as collection: assert collection.driver == driver assert len(list(collection)) == len(records1) + len(records2) @pytest.mark.parametrize( "driver", [ driver for driver in driver_mode_mingdal["a"].keys() if driver in supported_drivers ], ) def test_append_does_not_work_when_gdal_smaller_mingdal( tmpdir, driver, testdata_generator, monkeypatch ): """Test if driver supports append mode. If this test fails, it should be considered to update driver_mode_mingdal in drvsupport.py. """ if driver == "BNA" and GDALVersion.runtime() < GDALVersion(2, 0): pytest.skip("BNA driver segfaults with gdal 1.11") path = str(tmpdir.join(get_temp_filename(driver))) schema, crs, records1, records2, test_equal, create_kwargs = testdata_generator( driver, range(0, 5), range(5, 10) ) # If driver is not able to write, we cannot test append if driver in driver_mode_mingdal[ "w" ] and get_gdal_version_num() < calc_gdal_version_num( *driver_mode_mingdal["w"][driver] ): return # Create test file to append to with fiona.open( path, "w", driver=driver, crs=crs, schema=schema, **create_kwargs ) as c: c.writerecords(records1) if driver in driver_mode_mingdal[ "a" ] and get_gdal_version_num() < calc_gdal_version_num( *driver_mode_mingdal["a"][driver] ): # Test if driver really can't append for gdal < driver_mode_mingdal monkeypatch.delitem(fiona.drvsupport.driver_mode_mingdal["a"], driver) with pytest.raises(Exception): with fiona.open(path, "a", driver=driver) as c: c.writerecords(records2) if driver in {"FileGDB", "OpenFileGDB"}: open_driver = driver else: open_driver = None with fiona.open(path, driver=open_driver) as collection: assert collection.driver == driver assert len(list(collection)) == len(records1) + len(records2) @pytest.mark.parametrize( "driver", [driver for driver, raw in supported_drivers.items() if raw == "r"] ) def test_no_write_driver_cannot_write(tmpdir, driver, testdata_generator, monkeypatch): """Test if read only driver cannot write If this test fails, it should be considered to enable write support for the respective driver in drvsupport.py. 
""" monkeypatch.setitem(fiona.drvsupport.supported_drivers, driver, "rw") schema, crs, records1, _, test_equal, create_kwargs = testdata_generator( driver, range(0, 5), [] ) if driver == "BNA" and GDALVersion.runtime() < GDALVersion(2, 0): pytest.skip("BNA driver segfaults with gdal 1.11") if driver == "FlatGeobuf": pytest.xfail("FlatGeobuf doesn't raise an error but doesn't have write support") path = str(tmpdir.join(get_temp_filename(driver))) with pytest.raises(Exception): with fiona.open( path, "w", driver=driver, crs=crs, schema=schema, **create_kwargs ) as c: c.writerecords(records1) @pytest.mark.parametrize( "driver", [ driver for driver, raw in supported_drivers.items() if "w" in raw and "a" not in raw ], ) def test_no_append_driver_cannot_append( tmpdir, driver, testdata_generator, monkeypatch ): """ Test if a driver that supports write and not append cannot also append If this test fails, it should be considered to enable append support for the respective driver in drvsupport.py. """ monkeypatch.setitem(fiona.drvsupport.supported_drivers, driver, "raw") if driver == "BNA" and GDALVersion.runtime() < GDALVersion(2, 0): pytest.skip("BNA driver segfaults with gdal 1.11") path = str(tmpdir.join(get_temp_filename(driver))) schema, crs, records1, records2, test_equal, create_kwargs = testdata_generator( driver, range(0, 5), range(5, 10) ) # If driver is not able to write, we cannot test append if driver in driver_mode_mingdal[ "w" ] and get_gdal_version_num() < calc_gdal_version_num( *driver_mode_mingdal["w"][driver] ): return # Create test file to append to with fiona.open( path, "w", driver=driver, crs=crs, schema=schema, **create_kwargs ) as c: c.writerecords(records1) try: with fiona.open(path, "a", driver=driver) as c: c.writerecords(records2) except Exception as exc: log.exception("Caught exception in trying to append.") return if driver in {"FileGDB", "OpenFileGDB"}: open_driver = driver else: open_driver = None with fiona.open(path, driver=open_driver) as collection: assert collection.driver == driver assert len(list(collection)) == len(records1) def test_mingdal_drivers_are_supported(): """Test if mode and driver is enabled in supported_drivers""" for mode in driver_mode_mingdal: for driver in driver_mode_mingdal[mode]: # we cannot test drivers that are not present in the gdal installation if driver in supported_drivers: assert mode in supported_drivers[driver] Fiona-1.8.21/tests/test_encoding.py000066400000000000000000000030641420023252700172000ustar00rootroot00000000000000# coding=utf-8 """Encoding tests""" from glob import glob import os import shutil import pytest import fiona from .conftest import requires_gdal2 @pytest.fixture(scope='function') def gre_shp_cp1252(tmpdir): """A tempdir containing copies of gre.* files, .cpg set to cp1252 The shapefile attributes are in fact utf-8 encoded. 
""" test_files = glob(os.path.join(os.path.dirname(__file__), 'data/gre.*')) tmpdir = tmpdir.mkdir('data') for filename in test_files: shutil.copy(filename, str(tmpdir)) tmpdir.join('gre.cpg').write('CP1252') yield tmpdir.join('gre.shp') @requires_gdal2 def test_broken_encoding(gre_shp_cp1252): """Reading as cp1252 mis-encodes a Russian name""" with fiona.open(str(gre_shp_cp1252)) as src: assert src.session._get_internal_encoding() == 'utf-8' feat = next(iter(src)) assert feat['properties']['name_ru'] != u'Гренада' @requires_gdal2 def test_cpg_encoding(gre_shp_cp1252): """Reads a Russian name""" gre_shp_cp1252.join('../gre.cpg').write('UTF-8') with fiona.open(str(gre_shp_cp1252)) as src: assert src.session._get_internal_encoding() == 'utf-8' feat = next(iter(src)) assert feat['properties']['name_ru'] == u'Гренада' @requires_gdal2 def test_override_encoding(gre_shp_cp1252): """utf-8 override succeeds""" with fiona.open(str(gre_shp_cp1252), encoding='utf-8') as src: assert src.session._get_internal_encoding() == 'utf-8' assert next(iter(src))['properties']['name_ru'] == u'Гренада' Fiona-1.8.21/tests/test_env.py000066400000000000000000000072241420023252700162040ustar00rootroot00000000000000"""Tests of fiona.env""" import os import sys try: from unittest import mock except ImportError: import mock import pytest import fiona from fiona import _env from fiona.env import getenv, ensure_env, ensure_env_with_credentials from fiona.session import AWSSession, GSSession def test_nested_credentials(monkeypatch): """Check that rasterio.open() doesn't wipe out surrounding credentials""" @ensure_env_with_credentials def fake_opener(path): return fiona.env.getenv() with fiona.env.Env(session=AWSSession(aws_access_key_id='foo', aws_secret_access_key='bar')): assert fiona.env.getenv()['AWS_ACCESS_KEY_ID'] == 'foo' assert fiona.env.getenv()['AWS_SECRET_ACCESS_KEY'] == 'bar' monkeypatch.setenv('AWS_ACCESS_KEY_ID', 'lol') monkeypatch.setenv('AWS_SECRET_ACCESS_KEY', 'wut') gdalenv = fake_opener('s3://foo/bar') assert gdalenv['AWS_ACCESS_KEY_ID'] == 'foo' assert gdalenv['AWS_SECRET_ACCESS_KEY'] == 'bar' def test_ensure_env_decorator(gdalenv): @ensure_env def f(): return getenv()['FIONA_ENV'] assert f() is True def test_ensure_env_decorator_sets_gdal_data(gdalenv, monkeypatch): """fiona.env.ensure_env finds GDAL from environment""" @ensure_env def f(): return getenv()['GDAL_DATA'] monkeypatch.setenv('GDAL_DATA', '/lol/wut') assert f() == '/lol/wut' @mock.patch("fiona._env.GDALDataFinder.find_file") def test_ensure_env_decorator_sets_gdal_data_prefix(find_file, gdalenv, monkeypatch, tmpdir): """fiona.env.ensure_env finds GDAL data under a prefix""" @ensure_env def f(): return getenv()['GDAL_DATA'] find_file.return_value = None tmpdir.ensure("share/gdal/header.dxf") monkeypatch.delenv('GDAL_DATA', raising=False) monkeypatch.setattr(_env, '__file__', str(tmpdir.join("fake.py"))) monkeypatch.setattr(sys, 'prefix', str(tmpdir)) assert f() == str(tmpdir.join("share").join("gdal")) @mock.patch("fiona._env.GDALDataFinder.find_file") def test_ensure_env_decorator_sets_gdal_data_wheel(find_file, gdalenv, monkeypatch, tmpdir): """fiona.env.ensure_env finds GDAL data in a wheel""" @ensure_env def f(): return getenv()['GDAL_DATA'] find_file.return_value = None tmpdir.ensure("gdal_data/header.dxf") monkeypatch.delenv('GDAL_DATA', raising=False) monkeypatch.setattr(_env, '__file__', str(tmpdir.join(os.path.basename(_env.__file__)))) assert f() == str(tmpdir.join("gdal_data")) @mock.patch("fiona._env.GDALDataFinder.find_file") 
def test_ensure_env_with_decorator_sets_gdal_data_wheel(find_file, gdalenv, monkeypatch, tmpdir): """fiona.env.ensure_env finds GDAL data in a wheel""" @ensure_env_with_credentials def f(*args): return getenv()['GDAL_DATA'] find_file.return_value = None tmpdir.ensure("gdal_data/header.dxf") monkeypatch.delenv('GDAL_DATA', raising=False) monkeypatch.setattr(_env, '__file__', str(tmpdir.join(os.path.basename(_env.__file__)))) assert f("foo") == str(tmpdir.join("gdal_data")) def test_ensure_env_crs(path_coutwildrnp_shp): """Decoration of .crs works""" assert fiona.open(path_coutwildrnp_shp).crs def test_nested_gs_credentials(monkeypatch): """Check that rasterio.open() doesn't wipe out surrounding credentials""" @ensure_env_with_credentials def fake_opener(path): return fiona.env.getenv() with fiona.env.Env(session=GSSession(google_application_credentials='foo')): assert fiona.env.getenv()['GOOGLE_APPLICATION_CREDENTIALS'] == 'foo' gdalenv = fake_opener('gs://foo/bar') assert gdalenv['GOOGLE_APPLICATION_CREDENTIALS'] == 'foo' Fiona-1.8.21/tests/test_feature.py000066400000000000000000000117271420023252700170520ustar00rootroot00000000000000"""Tests for feature objects.""" import logging import os import shutil import sys import tempfile import pytest import fiona from fiona import collection from fiona.collection import Collection from fiona.ogrext import featureRT class TestPointRoundTrip(object): def setup(self): self.tempdir = tempfile.mkdtemp() schema = {'geometry': 'Point', 'properties': {'title': 'str'}} self.c = Collection(os.path.join(self.tempdir, "foo.shp"), "w", driver="ESRI Shapefile", schema=schema) def teardown(self): self.c.close() shutil.rmtree(self.tempdir) def test_geometry(self): f = { 'id': '1', 'geometry': {'type': 'Point', 'coordinates': (0.0, 0.0)}, 'properties': {'title': u'foo'} } g = featureRT(f, self.c) assert ( sorted(g['geometry'].items()) == [('coordinates', (0.0, 0.0)), ('type', 'Point')]) def test_properties(self): f = { 'id': '1', 'geometry': {'type': 'Point', 'coordinates': (0.0, 0.0)}, 'properties': {'title': u'foo'} } g = featureRT(f, self.c) assert g['properties']['title'] == 'foo' def test_none_property(self): f = { 'id': '1', 'geometry': {'type': 'Point', 'coordinates': (0.0, 0.0)}, 'properties': {'title': None} } g = featureRT(f, self.c) assert g['properties']['title'] is None class TestLineStringRoundTrip(object): def setup(self): self.tempdir = tempfile.mkdtemp() schema = {'geometry': 'LineString', 'properties': {'title': 'str'}} self.c = Collection(os.path.join(self.tempdir, "foo.shp"), "w", "ESRI Shapefile", schema=schema) def teardown(self): self.c.close() shutil.rmtree(self.tempdir) def test_geometry(self): f = { 'id': '1', 'geometry': { 'type': 'LineString', 'coordinates': [(0.0, 0.0), (1.0, 1.0)] }, 'properties': {'title': u'foo'} } g = featureRT(f, self.c) assert ( sorted(g['geometry'].items()) == [('coordinates', [(0.0, 0.0), (1.0, 1.0)]), ('type', 'LineString')]) def test_properties(self): f = { 'id': '1', 'geometry': {'type': 'Point', 'coordinates': (0.0, 0.0)}, 'properties': {'title': u'foo'} } g = featureRT(f, self.c) assert g['properties']['title'] == 'foo' class TestPolygonRoundTrip(object): def setup(self): self.tempdir = tempfile.mkdtemp() schema = {'geometry': 'Polygon', 'properties': {'title': 'str'}} self.c = Collection(os.path.join(self.tempdir, "foo.shp"), "w", "ESRI Shapefile", schema=schema) def teardown(self): self.c.close() shutil.rmtree(self.tempdir) def test_geometry(self): f = { 'id': '1', 'geometry': { 'type': 'Polygon', 
'coordinates': [[(0.0, 0.0), (0.0, 1.0), (1.0, 1.0), (1.0, 0.0), (0.0, 0.0)]] }, 'properties': {'title': u'foo'} } g = featureRT(f, self.c) assert ( sorted(g['geometry'].items()) == [('coordinates', [[(0.0, 0.0), (0.0, 1.0), (1.0, 1.0), (1.0, 0.0), (0.0, 0.0)]]), ('type', 'Polygon')]) def test_properties(self): f = { 'id': '1', 'geometry': { 'type': 'Polygon', 'coordinates': [[(0.0, 0.0), (0.0, 1.0), (1.0, 1.0), (1.0, 0.0), (0.0, 0.0)]] }, 'properties': {'title': u'foo'} } g = featureRT(f, self.c) assert g['properties']['title'] == 'foo' @pytest.mark.parametrize("driver, extension", [("ESRI Shapefile", "shp"), ("GeoJSON", "geojson")]) def test_feature_null_field(tmpdir, driver, extension): """ In GDAL 2.2 the behaviour of OGR_F_IsFieldSet slightly changed. Some drivers (e.g. GeoJSON) also require fields to be explicitly set to null. See GH #460. """ meta = {"driver": driver, "schema": {"geometry": "Point", "properties": {"RETURN_P": "str"}}} filename = os.path.join(str(tmpdir), "test_null."+extension) with fiona.open(filename, "w", **meta) as dst: g = {"coordinates": [1.0, 2.0], "type": "Point"} feature = {"geometry": g, "properties": {"RETURN_P": None}} dst.write(feature) with fiona.open(filename, "r") as src: feature = next(iter(src)) assert(feature["properties"]["RETURN_P"] is None) Fiona-1.8.21/tests/test_fio_bounds.py000066400000000000000000000057231420023252700175450ustar00rootroot00000000000000"""Tests for `$ fio bounds`.""" import re from fiona.fio import bounds from fiona.fio.main import main_group def test_fail(runner): result = runner.invoke(main_group, ['bounds', ], '5') assert result.exit_code == 1 def test_seq(feature_seq, runner): result = runner.invoke(main_group, ['bounds', ], feature_seq) assert result.exit_code == 0 assert result.output.count('[') == result.output.count(']') == 2 assert len(re.findall(r'\d*\.\d*', result.output)) == 8 def test_seq_rs(feature_seq_pp_rs, runner): result = runner.invoke(main_group, ['bounds', ], feature_seq_pp_rs) assert result.exit_code == 0 assert result.output.count('[') == result.output.count(']') == 2 assert len(re.findall(r'\d*\.\d*', result.output)) == 8 def test_precision(feature_seq, runner): result = runner.invoke(main_group, ['bounds', '--precision', 1], feature_seq) assert result.exit_code == 0 assert result.output.count('[') == result.output.count(']') == 2 assert len(re.findall(r'\d*\.\d{1}\D', result.output)) == 8 def test_explode(feature_collection, runner): result = runner.invoke(main_group, ['bounds', '--explode'], feature_collection) assert result.exit_code == 0 assert result.output.count('[') == result.output.count(']') == 2 assert len(re.findall(r'\d*\.\d*', result.output)) == 8 def test_explode_pp(feature_collection_pp, runner): result = runner.invoke(main_group, ['bounds', '--explode'], feature_collection_pp) assert result.exit_code == 0 assert result.output.count('[') == result.output.count(']') == 2 assert len(re.findall(r'\d*\.\d*', result.output)) == 8 def test_with_id(feature_seq, runner): result = runner.invoke(main_group, ['bounds', '--with-id'], feature_seq) assert result.exit_code == 0 assert result.output.count('id') == result.output.count('bbox') == 2 def test_explode_with_id(feature_collection, runner): result = runner.invoke( main_group, ['bounds', '--explode', '--with-id'], feature_collection) assert result.exit_code == 0 assert result.output.count('id') == result.output.count('bbox') == 2 def test_with_obj(feature_seq, runner): result = runner.invoke(main_group, ['bounds', '--with-obj'], feature_seq) 
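    # Each input feature should yield one bounds record that keeps the full
    # feature object alongside its bbox (an inference from the assertions
    # below: "geometry" and "bbox" each appear once per input feature).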
assert result.exit_code == 0 assert result.output.count('geometry') == result.output.count('bbox') == 2 def test_bounds_explode_with_obj(feature_collection, runner): result = runner.invoke( main_group, ['bounds', '--explode', '--with-obj'], feature_collection) assert result.exit_code == 0 assert result.output.count('geometry') == result.output.count('bbox') == 2 def test_explode_output_rs(feature_collection, runner): result = runner.invoke(main_group, ['bounds', '--explode', '--rs'], feature_collection) assert result.exit_code == 0 assert result.output.count(u'\u001e') == 2 assert result.output.count('[') == result.output.count(']') == 2 assert len(re.findall(r'\d*\.\d*', result.output)) == 8 Fiona-1.8.21/tests/test_fio_calc.py000066400000000000000000000037101420023252700171470ustar00rootroot00000000000000"""Tests for `$ fio calc`.""" from __future__ import division import json from click.testing import CliRunner from fiona.fio.main import main_group def test_fail(): runner = CliRunner() result = runner.invoke(main_group, ['calc', "TEST", "f.properties.test > 5"], '{"type": "no_properties"}') assert result.exit_code == 1 def _load(output): features = [] for x in output.splitlines(): try: features.append(json.loads(x)) except: # Click combines stdout and stderr and shapely dumps logs to # stderr that are not JSON # https://github.com/pallets/click/issues/371 pass return features def test_calc_seq(feature_seq, runner): result = runner.invoke(main_group, ['calc', "TEST", "f.properties.AREA / f.properties.PERIMETER"], feature_seq) assert result.exit_code == 0 feats = _load(result.output) assert len(feats) == 2 for feat in feats: assert feat['properties']['TEST'] == \ feat['properties']['AREA'] / feat['properties']['PERIMETER'] def test_bool_seq(feature_seq, runner): result = runner.invoke(main_group, ['calc', "TEST", "f.properties.AREA > 0.015"], feature_seq) assert result.exit_code == 0 feats = _load(result.output) assert len(feats) == 2 assert feats[0]['properties']['TEST'] assert not feats[1]['properties']['TEST'] def test_existing_property(feature_seq, runner): result = runner.invoke(main_group, ['calc', "AREA", "f.properties.AREA * 2"], feature_seq) assert result.exit_code == 1 result = runner.invoke(main_group, ['calc', "--overwrite", "AREA", "f.properties.AREA * 2"], feature_seq) assert result.exit_code == 0 feats = _load(result.output) assert len(feats) == 2 for feat in feats: assert 'AREA' in feat['properties'] Fiona-1.8.21/tests/test_fio_cat.py000066400000000000000000000054141420023252700170170ustar00rootroot00000000000000"""Tests for `$ fio cat`.""" import os import pytest from click.testing import CliRunner from fiona.fio.main import main_group from fiona.fio import cat def test_one(path_coutwildrnp_shp): runner = CliRunner() result = runner.invoke(main_group, ['cat', path_coutwildrnp_shp]) assert result.exit_code == 0 assert result.output.count('"Feature"') == 67 def test_two(path_coutwildrnp_shp): runner = CliRunner() result = runner.invoke(main_group, ['cat', path_coutwildrnp_shp, path_coutwildrnp_shp]) assert result.exit_code == 0 assert result.output.count('"Feature"') == 134 def test_bbox_no(path_coutwildrnp_shp): runner = CliRunner() result = runner.invoke( main_group, ['cat', path_coutwildrnp_shp, '--bbox', '0,10,80,20'], catch_exceptions=False) assert result.exit_code == 0 assert result.output == "" def test_bbox_yes(path_coutwildrnp_shp): runner = CliRunner() result = runner.invoke( main_group, ['cat', path_coutwildrnp_shp, '--bbox', '-109,37,-107,39'], 
catch_exceptions=False) assert result.exit_code == 0 assert result.output.count('"Feature"') == 19 def test_bbox_yes_two_files(path_coutwildrnp_shp): runner = CliRunner() result = runner.invoke( main_group, ['cat', path_coutwildrnp_shp, path_coutwildrnp_shp, '--bbox', '-109,37,-107,39'], catch_exceptions=False) assert result.exit_code == 0 assert result.output.count('"Feature"') == 38 def test_bbox_json_yes(path_coutwildrnp_shp): runner = CliRunner() result = runner.invoke( main_group, ['cat', path_coutwildrnp_shp, '--bbox', '[-109,37,-107,39]'], catch_exceptions=False) assert result.exit_code == 0 assert result.output.count('"Feature"') == 19 def test_multi_layer(data_dir): layerdef = "1:coutwildrnp,1:coutwildrnp" runner = CliRunner() result = runner.invoke( main_group, ['cat', '--layer', layerdef, data_dir]) assert result.output.count('"Feature"') == 134 def test_multi_layer_fail(data_dir): runner = CliRunner() result = runner.invoke(main_group, ['cat', '--layer', '200000:coutlildrnp', data_dir]) assert result.exit_code != 0 def test_vfs(path_coutwildrnp_zip): runner = CliRunner() result = runner.invoke(main_group, [ 'cat', 'zip://{}'.format(path_coutwildrnp_zip)]) assert result.exit_code == 0 assert result.output.count('"Feature"') == 67 def test_dst_crs_epsg3857(path_coutwildrnp_shp): """Confirm fix of issue #952""" runner = CliRunner() result = runner.invoke( main_group, ["cat", "--dst-crs", "EPSG:3857", path_coutwildrnp_shp] ) assert result.exit_code == 0 assert result.output.count('"Feature"') == 67 Fiona-1.8.21/tests/test_fio_collect.py000066400000000000000000000051371420023252700176770ustar00rootroot00000000000000"""Tests for `$ fio collect`.""" import json import sys from click.testing import CliRunner import pytest # from fiona.fio import collect from fiona.fio.main import main_group def test_collect_rs(feature_seq_pp_rs): runner = CliRunner() result = runner.invoke( main_group, ['collect', '--src-crs', 'EPSG:3857'], feature_seq_pp_rs, catch_exceptions=False) assert result.exit_code == 0 assert result.output.count('"Feature"') == 2 def test_collect_no_rs(feature_seq): runner = CliRunner() result = runner.invoke( main_group, ['collect', '--src-crs', 'EPSG:3857'], feature_seq, catch_exceptions=False) assert result.exit_code == 0 assert result.output.count('"Feature"') == 2 def test_collect_ld(feature_seq): runner = CliRunner() result = runner.invoke( main_group, ['collect', '--with-ld-context', '--add-ld-context-item', 'foo=bar'], feature_seq, catch_exceptions=False) assert result.exit_code == 0 assert '"@context": {' in result.output assert '"foo": "bar"' in result.output def test_collect_rec_buffered(feature_seq): runner = CliRunner() result = runner.invoke(main_group, ['collect', '--record-buffered'], feature_seq) assert result.exit_code == 0 assert '"FeatureCollection"' in result.output def test_collect_noparse(feature_seq): runner = CliRunner() result = runner.invoke( main_group, ['collect', '--no-parse'], feature_seq, catch_exceptions=False) assert result.exit_code == 0 assert result.output.count('"Feature"') == 2 assert len(json.loads(result.output)['features']) == 2 def test_collect_noparse_records(feature_seq): runner = CliRunner() result = runner.invoke( main_group, ['collect', '--no-parse', '--record-buffered'], feature_seq, catch_exceptions=False) assert result.exit_code == 0 assert result.output.count('"Feature"') == 2 assert len(json.loads(result.output)['features']) == 2 def test_collect_src_crs(feature_seq): runner = CliRunner() result = runner.invoke( main_group, 
['collect', '--no-parse', '--src-crs', 'epsg:4326'], feature_seq, catch_exceptions=False) assert result.exit_code == 2 def test_collect_noparse_rs(feature_seq_pp_rs): runner = CliRunner() result = runner.invoke( main_group, ['collect', '--no-parse'], feature_seq_pp_rs, catch_exceptions=False) assert result.exit_code == 0 assert result.output.count('"Feature"') == 2 assert len(json.loads(result.output)['features']) == 2 Fiona-1.8.21/tests/test_fio_distrib.py000066400000000000000000000010641420023252700177050ustar00rootroot00000000000000"""Tests for `$ fio distrib`.""" from click.testing import CliRunner from fiona.fio.main import main_group def test_distrib(feature_collection_pp): runner = CliRunner() result = runner.invoke(main_group, ['distrib', ], feature_collection_pp) assert result.exit_code == 0 assert result.output.count('"Feature"') == 2 def test_distrib_no_rs(feature_collection): runner = CliRunner() result = runner.invoke(main_group, ['distrib', ], feature_collection) assert result.exit_code == 0 assert result.output.count('"Feature"') == 2 Fiona-1.8.21/tests/test_fio_dump.py000066400000000000000000000017241420023252700172150ustar00rootroot00000000000000"""Unittests for $ fio dump""" import json from click.testing import CliRunner import fiona from fiona.fio.main import main_group def test_dump(path_coutwildrnp_shp): runner = CliRunner() result = runner.invoke(main_group, ['dump', path_coutwildrnp_shp]) assert result.exit_code == 0 assert '"FeatureCollection"' in result.output def test_dump_layer(path_gpx): for layer in ('routes', '1'): runner = CliRunner() result = runner.invoke(main_group, ['dump', path_gpx, '--layer', layer]) assert result.exit_code == 0 assert '"FeatureCollection"' in result.output def test_dump_layer_vfs(path_coutwildrnp_zip): path = 'zip://{}'.format(path_coutwildrnp_zip) result = CliRunner().invoke(main_group, ['dump', path]) assert result.exit_code == 0 loaded = json.loads(result.output) with fiona.open(path) as src: assert len(loaded['features']) == len(src) assert len(loaded['features']) > 0 Fiona-1.8.21/tests/test_fio_filter.py000066400000000000000000000014741420023252700175370ustar00rootroot00000000000000"""Tests for `$ fio filter`.""" from fiona.fio.main import main_group def test_fail(runner): result = runner.invoke(main_group, ['filter', "f.properties.test > 5" ], "{'type': 'no_properties'}") assert result.exit_code == 1 def test_seq(feature_seq, runner): result = runner.invoke(main_group, ['filter', "f.properties.AREA > 0.01"], feature_seq) assert result.exit_code == 0 assert result.output.count('Feature') == 2 result = runner.invoke(main_group, ['filter', "f.properties.AREA > 0.015"], feature_seq) assert result.exit_code == 0 assert result.output.count('Feature') == 1 result = runner.invoke(main_group, ['filter', "f.properties.AREA > 0.02"], feature_seq) assert result.exit_code == 0 assert result.output.count('Feature') == 0 Fiona-1.8.21/tests/test_fio_info.py000066400000000000000000000054641420023252700172100ustar00rootroot00000000000000"""Tests for ``$ fio info``.""" import json from pkg_resources import iter_entry_points import re import sys from click.testing import CliRunner import pytest from fiona.fio.main import main_group def test_info_json(path_coutwildrnp_shp): runner = CliRunner() result = runner.invoke(main_group, ['info', path_coutwildrnp_shp]) assert result.exit_code == 0 assert '"count": 67' in result.output assert '"crs": "EPSG:4326"' in result.output assert '"driver": "ESRI Shapefile"' in result.output assert '"name": 
"coutwildrnp"' in result.output def test_info_count(path_coutwildrnp_shp): runner = CliRunner() result = runner.invoke( main_group, ['info', '--count', path_coutwildrnp_shp]) assert result.exit_code == 0 assert result.output == "67\n" def test_info_bounds(path_coutwildrnp_shp): runner = CliRunner() result = runner.invoke( main_group, ['info', '--bounds', path_coutwildrnp_shp]) assert result.exit_code == 0 assert len(re.findall(r'\d*\.\d*', result.output)) == 4 def test_all_registered(): """Make sure all the subcommands are actually registered to the main CLI group.""" for ep in iter_entry_points('fiona.fio_commands'): assert ep.name in main_group.commands def _filter_info_warning(lines): """$ fio info can issue a RuntimeWarning, but click adds stderr to stdout so we have to filter it out before decoding JSON lines.""" lines = list(filter(lambda x: 'RuntimeWarning' not in x, lines)) return lines def test_info_no_count(path_gpx): """Make sure we can still get a `$ fio info` report on datasources that do not support feature counting, AKA `len(collection)`. """ runner = CliRunner() result = runner.invoke(main_group, ['info', path_gpx]) assert result.exit_code == 0 lines = _filter_info_warning(result.output.splitlines()) assert len(lines) == 1, "First line is warning & second is JSON. No more." assert json.loads(lines[0])['count'] is None def test_info_layer(path_gpx): for layer in ('routes', '1'): runner = CliRunner() result = runner.invoke(main_group, [ 'info', path_gpx, '--layer', layer]) assert result.exit_code == 0 lines = _filter_info_warning(result.output.splitlines()) assert len(lines) == 1, "1st line is warning & 2nd is JSON - no more." assert json.loads(lines[0])['name'] == 'routes' def test_info_vfs(path_coutwildrnp_zip, path_coutwildrnp_shp): runner = CliRunner() zip_result = runner.invoke(main_group, [ 'info', 'zip://{}'.format(path_coutwildrnp_zip)]) shp_result = runner.invoke(main_group, [ 'info', path_coutwildrnp_shp]) assert zip_result.exit_code == shp_result.exit_code == 0 assert zip_result.output == shp_result.output Fiona-1.8.21/tests/test_fio_load.py000066400000000000000000000101571420023252700171670ustar00rootroot00000000000000"""Tests for `$ fio load`.""" import json import os import shutil import pytest import fiona from fiona.fio.main import main_group def test_err(runner): result = runner.invoke( main_group, ['load'], '', catch_exceptions=False) assert result.exit_code == 2 def test_exception(tmpdir, runner): tmpfile = str(tmpdir.mkdir('tests').join('test_exception.shp')) result = runner.invoke(main_group, [ 'load', '-f', 'Shapefile', tmpfile ], '42', catch_exceptions=False) assert result.exit_code == 1 def test_collection(tmpdir, feature_collection, runner): tmpfile = str(tmpdir.mkdir('tests').join('test_collection.shp')) result = runner.invoke( main_group, ['load', '-f', 'Shapefile', tmpfile], feature_collection) assert result.exit_code == 0 assert len(fiona.open(tmpfile)) == 2 def test_seq_rs(feature_seq_pp_rs, tmpdir, runner): tmpfile = str(tmpdir.mkdir('tests').join('test_seq_rs.shp')) result = runner.invoke( main_group, ['load', '-f', 'Shapefile', tmpfile], feature_seq_pp_rs) assert result.exit_code == 0 assert len(fiona.open(tmpfile)) == 2 def test_seq_no_rs(tmpdir, runner, feature_seq): tmpfile = str(tmpdir.mkdir('tests').join('test_seq_no_rs.shp')) result = runner.invoke(main_group, [ 'load', '-f', 'Shapefile', tmpfile], feature_seq) assert result.exit_code == 0 assert len(fiona.open(tmpfile)) == 2 def test_dst_crs_default_to_src_crs(tmpdir, runner, 
feature_seq): """When --dst-crs is not given default to --src-crs.""" tmpfile = str(tmpdir.mkdir('tests').join('test_src_vs_dst_crs.shp')) result = runner.invoke(main_group, [ 'load', '--src-crs', 'EPSG:32617', '-f', 'Shapefile', tmpfile ], feature_seq) assert result.exit_code == 0 with fiona.open(tmpfile) as src: assert src.crs == {'init': 'epsg:32617'} assert len(src) == len(feature_seq.splitlines()) def test_different_crs(tmpdir, runner, feature_seq): tmpfile = str(tmpdir.mkdir('tests').join('test_different_crs.shp')) result = runner.invoke( main_group, [ 'load', '--src-crs', 'EPSG:32617', '--dst-crs', 'EPSG:32610', '-f', 'Shapefile', tmpfile ], feature_seq) assert result.exit_code == 0 with fiona.open(tmpfile) as src: assert src.crs == {'init': 'epsg:32610'} assert len(src) == len(feature_seq.splitlines()) def test_dst_crs_no_src(tmpdir, runner, feature_seq): tmpfile = str(tmpdir.mkdir('tests').join('test_dst_crs_no_src.shp')) result = runner.invoke(main_group, [ 'load', '--dst-crs', 'EPSG:32610', '-f', 'Shapefile', tmpfile ], feature_seq) assert result.exit_code == 0 with fiona.open(tmpfile) as src: assert src.crs == {'init': 'epsg:32610'} assert len(src) == len(feature_seq.splitlines()) def test_fio_load_layer(tmpdir, runner): outdir = str(tmpdir.mkdir('tests').mkdir('test_fio_load_layer')) try: feature = { 'type': 'Feature', 'properties': {'key': 'value'}, 'geometry': { 'type': 'Point', 'coordinates': (5.0, 39.0) } } sequence = os.linesep.join(map(json.dumps, [feature, feature])) result = runner.invoke(main_group, [ 'load', outdir, '--driver', 'ESRI Shapefile', '--src-crs', 'EPSG:4236', '--layer', 'test_layer'], input=sequence) assert result.exit_code == 0 with fiona.open(outdir) as src: assert len(src) == 2 assert src.name == 'test_layer' assert src.schema['geometry'] == 'Point' finally: shutil.rmtree(outdir) @pytest.mark.iconv def test_creation_options(tmpdir, runner, feature_seq): tmpfile = str(tmpdir.mkdir("tests").join("test.shp")) result = runner.invoke( main_group, ["load", "-f", "Shapefile", "--co", "ENCODING=LATIN1", tmpfile], feature_seq, ) assert result.exit_code == 0 assert tmpdir.join("tests/test.cpg").read() == "LATIN1" Fiona-1.8.21/tests/test_fio_ls.py000066400000000000000000000033701420023252700166650ustar00rootroot00000000000000"""Unittests for `$ fio ls`""" import json import sys import os from click.testing import CliRunner import pytest import fiona from fiona.fio.main import main_group def test_fio_ls_single_layer(data_dir): result = CliRunner().invoke(main_group, ['ls', data_dir]) assert result.exit_code == 0 assert len(result.output.splitlines()) == 1 assert sorted(json.loads(result.output)) == ['coutwildrnp', 'gre', 'test_tin'] def test_fio_ls_indent(path_coutwildrnp_shp): result = CliRunner().invoke(main_group, [ 'ls', '--indent', '4', path_coutwildrnp_shp]) assert result.exit_code == 0 assert len(result.output.strip().splitlines()) == 3 assert json.loads(result.output) == ['coutwildrnp'] def test_fio_ls_multi_layer(path_coutwildrnp_shp, tmpdir): outdir = str(tmpdir.mkdir('test_fio_ls_multi_layer')) # Copy test shapefile into new directory # Shapefile driver treats a directory of shapefiles as a single # multi-layer datasource layer_names = ['l1', 'l2'] for layer in layer_names: with fiona.open(path_coutwildrnp_shp) as src, \ fiona.open(outdir, 'w', layer=layer, **src.meta) as dst: for feat in src: dst.write(feat) # Run CLI test result = CliRunner().invoke(main_group, [ 'ls', outdir]) assert result.exit_code == 0 json_result = json.loads(result.output) 
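    # The directory written above is exposed by the Shapefile driver as one
    # multi-layer datasource, so the listing should contain exactly l1 and l2.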
assert sorted(json_result) == sorted(layer_names) def test_fio_ls_vfs(path_coutwildrnp_zip): runner = CliRunner() result = runner.invoke(main_group, [ 'ls', 'zip://{}'.format(path_coutwildrnp_zip)]) assert result.exit_code == 0 loaded = json.loads(result.output) assert len(loaded) == 1 assert loaded[0] == 'coutwildrnp' Fiona-1.8.21/tests/test_fio_rm.py000066400000000000000000000033761420023252700166730ustar00rootroot00000000000000import os import pytest import fiona from click.testing import CliRunner from fiona.fio.main import main_group def create_sample_data(filename, driver, **extra_meta): meta = { 'driver': driver, 'schema': { 'geometry': 'Point', 'properties': {} } } meta.update(extra_meta) with fiona.open(filename, 'w', **meta) as dst: dst.write({ 'geometry': { 'type': 'Point', 'coordinates': (0, 0), }, 'properties': {}, }) assert(os.path.exists(filename)) drivers = ["ESRI Shapefile", "GeoJSON"] @pytest.mark.parametrize("driver", drivers) def test_remove(tmpdir, driver): extension = {"ESRI Shapefile": "shp", "GeoJSON": "json"}[driver] filename = "delete_me.{extension}".format(extension=extension) filename = str(tmpdir.join(filename)) create_sample_data(filename, driver) result = CliRunner().invoke(main_group, [ "rm", filename, "--yes" ]) print(result.output) assert result.exit_code == 0 assert not os.path.exists(filename) has_gpkg = "GPKG" in fiona.supported_drivers.keys() @pytest.mark.skipif(not has_gpkg, reason="Requires GPKG driver") def test_remove_layer(tmpdir): filename = str(tmpdir.join("a_filename.gpkg")) create_sample_data(filename, "GPKG", layer="layer1") create_sample_data(filename, "GPKG", layer="layer2") assert fiona.listlayers(filename) == ["layer1", "layer2"] result = CliRunner().invoke(main_group, [ "rm", filename, "--layer", "layer2", "--yes" ]) print(result.output) assert result.exit_code == 0 assert os.path.exists(filename) assert fiona.listlayers(filename) == ["layer1"] Fiona-1.8.21/tests/test_geojson.py000066400000000000000000000071411420023252700170560ustar00rootroot00000000000000import pytest import fiona from fiona.collection import supported_drivers from fiona.errors import FionaValueError, DriverError, SchemaError, CRSError def test_json_read(path_coutwildrnp_json): with fiona.open(path_coutwildrnp_json, 'r') as c: assert len(c) == 67 def test_json(tmpdir): """Write a simple GeoJSON file""" path = str(tmpdir.join('foo.json')) with fiona.open(path, 'w', driver='GeoJSON', schema={'geometry': 'Unknown', 'properties': [('title', 'str')]}) as c: c.writerecords([{ 'geometry': {'type': 'Point', 'coordinates': [0.0, 0.0]}, 'properties': {'title': 'One'}}]) c.writerecords([{ 'geometry': {'type': 'MultiPoint', 'coordinates': [[0.0, 0.0]]}, 'properties': {'title': 'Two'}}]) with fiona.open(path) as c: assert c.schema['geometry'] == 'Unknown' assert len(c) == 2 def test_json_overwrite(tmpdir): """Overwrite an existing GeoJSON file""" path = str(tmpdir.join('foo.json')) driver = "GeoJSON" schema1 = {"geometry": "Unknown", "properties": [("title", "str")]} schema2 = {"geometry": "Unknown", "properties": [("other", "str")]} features1 = [ { "geometry": {"type": "Point", "coordinates": [0.0, 0.0]}, "properties": {"title": "One"}, }, { "geometry": {"type": "MultiPoint", "coordinates": [[0.0, 0.0]]}, "properties": {"title": "Two"}, } ] features2 = [ { "geometry": {"type": "Point", "coordinates": [0.0, 0.0]}, "properties": {"other": "Three"}, }, ] # write some data to a file with fiona.open(path, "w", driver=driver, schema=schema1) as c: c.writerecords(features1) # test the 
data was written correctly with fiona.open(path, "r") as c: assert len(c) == 2 feature = next(iter(c)) assert feature["properties"]["title"] == "One" # attempt to overwrite the existing file with some new data with fiona.open(path, "w", driver=driver, schema=schema2) as c: c.writerecords(features2) # test the second file was written correctly with fiona.open(path, "r") as c: assert len(c) == 1 feature = next(iter(c)) assert feature["properties"]["other"] == "Three" def test_json_overwrite_invalid(tmpdir): """Overwrite an existing file that isn't a valid GeoJSON""" # write some invalid data to a file path = str(tmpdir.join('foo.json')) with open(path, "w") as f: f.write("This isn't a valid GeoJSON file!!!") schema1 = {"geometry": "Unknown", "properties": [("title", "str")]} features1 = [ { "geometry": {"type": "Point", "coordinates": [0.0, 0.0]}, "properties": {"title": "One"}, }, { "geometry": {"type": "MultiPoint", "coordinates": [[0.0, 0.0]]}, "properties": {"title": "Two"}, } ] # attempt to overwrite it with a valid file with fiona.open(path, "w", driver="GeoJSON", schema=schema1) as dst: dst.writerecords(features1) # test the data was written correctly with fiona.open(path, "r") as src: assert len(src) == 2 def test_write_json_invalid_directory(tmpdir): """Attempt to create a file in a directory that doesn't exist""" path = str(tmpdir.join('does-not-exist', 'foo.json')) schema = {"geometry": "Unknown", "properties": [("title", "str")]} with pytest.raises(DriverError): fiona.open(path, "w", driver="GeoJSON", schema=schema) Fiona-1.8.21/tests/test_geometry.py000066400000000000000000000110021420023252700172340ustar00rootroot00000000000000"""Tests for geometry objects.""" import pytest from fiona._geometry import (GeomBuilder, geometryRT) from fiona.errors import UnsupportedGeometryTypeError def geometry_wkb(wkb): try: wkb = bytes.fromhex(wkb) except AttributeError: wkb = wkb.decode('hex') return GeomBuilder().build_wkb(wkb) def test_ogr_builder_exceptions(): geom = {'type': "Bogus", 'coordinates': None} with pytest.raises(ValueError): geometryRT(geom) @pytest.mark.parametrize('geom_type, coordinates', [ ('Point', (0.0, 0.0)), ('LineString', [(0.0, 0.0), (1.0, 1.0)]), ('Polygon', [[(0.0, 0.0), (0.0, 1.0), (1.0, 1.0), (1.0, 0.0), (0.0, 0.0)]]), ('MultiPoint', [(0.0, 0.0), (1.0, 1.0)]), ('MultiLineString', [[(0.0, 0.0), (1.0, 1.0)]]), ('MultiPolygon', [[[(0.0, 0.0), (0.0, 1.0), (1.0, 1.0), (1.0, 0.0), (0.0, 0.0)]]]), ]) def test_round_tripping(geom_type, coordinates): result = geometryRT({'type': geom_type, 'coordinates': coordinates}) assert result['type'] == geom_type assert result['coordinates'] == coordinates @pytest.mark.parametrize('geom_type, coordinates', [ ('Polygon', [[(0.0, 0.0), (0.0, 1.0), (1.0, 1.0), (1.0, 0.0)]]), ('MultiPolygon', [[[(0.0, 0.0), (0.0, 1.0), (1.0, 1.0), (1.0, 0.0)]]]), ]) def test_implicitly_closed_round_tripping(geom_type, coordinates): result = geometryRT({'type': geom_type, 'coordinates': coordinates}) assert result['type'] == geom_type result_coordinates = result['coordinates'] while not isinstance(coordinates[0], tuple): result_coordinates = result_coordinates[0] coordinates = coordinates[0] assert result_coordinates[:-1] == coordinates def test_geometry_collection_round_trip(): geom = { 'type': "GeometryCollection", 'geometries': [ {'type': "Point", 'coordinates': (0.0, 0.0)}, { 'type': "LineString", 'coordinates': [(0.0, 0.0), (1.0, 1.0)]}]} result = geometryRT(geom) assert len(result['geometries']) == 2 assert [g['type'] for g in result['geometries']] 
        == ['Point', 'LineString']


def test_point_wkb():
    # Hex-encoded Point (0 0)
    wkb = "010100000000000000000000000000000000000000"
    geom = geometry_wkb(wkb)
    assert geom['type'] == "Point"
    assert geom['coordinates'] == (0.0, 0.0)


def test_line_wkb():
    # Hex-encoded LineString (0 0, 1 1)
    wkb = ("01020000000200000000000000000000000000000000000000000000000000f03f"
           "000000000000f03f")
    geom = geometry_wkb(wkb)
    assert geom['type'] == "LineString"
    assert geom['coordinates'] == [(0.0, 0.0), (1.0, 1.0)]


def test_polygon_wkb():
    # 1 x 1 box (0, 0, 1, 1)
    wkb = ("01030000000100000005000000000000000000f03f000000000000000000000000"
           "0000f03f000000000000f03f0000000000000000000000000000f03f0000000000"
           "0000000000000000000000000000000000f03f0000000000000000")
    geom = geometry_wkb(wkb)
    assert geom['type'] == "Polygon"
    assert len(geom['coordinates']) == 1
    assert len(geom['coordinates'][0]) == 5
    x, y = zip(*geom['coordinates'][0])
    assert min(x) == 0.0
    assert min(y) == 0.0
    assert max(x) == 1.0
    assert max(y) == 1.0


def test_multipoint_wkb():
    wkb = ("010400000002000000010100000000000000000000000000000000000000010100"
           "0000000000000000f03f000000000000f03f")
    geom = geometry_wkb(wkb)
    assert geom['type'] == "MultiPoint"
    assert geom['coordinates'] == [(0.0, 0.0), (1.0, 1.0)]


def test_multilinestring_wkb():
    # Hex-encoded LineString (0 0, 1 1)
    wkb = ("010500000001000000010200000002000000000000000000000000000000000000"
           "00000000000000f03f000000000000f03f")
    geom = geometry_wkb(wkb)
    assert geom['type'] == "MultiLineString"
    assert len(geom['coordinates']) == 1
    assert len(geom['coordinates'][0]) == 2
    assert geom['coordinates'][0] == [(0.0, 0.0), (1.0, 1.0)]


def test_multipolygon_wkb():
    # [1 x 1 box (0, 0, 1, 1)]
    wkb = ("01060000000100000001030000000100000005000000000000000000f03f000000"
           "0000000000000000000000f03f000000000000f03f000000000000000000000000"
           "0000f03f00000000000000000000000000000000000000000000f03f0000000000"
           "000000")
    geom = geometry_wkb(wkb)
    assert geom['type'] == "MultiPolygon"
    assert len(geom['coordinates']) == 1
    assert len(geom['coordinates'][0]) == 1
    assert len(geom['coordinates'][0][0]) == 5
    x, y = zip(*geom['coordinates'][0][0])
    assert min(x) == 0.0
    assert min(y) == 0.0
    assert max(x) == 1.0
    assert max(y) == 1.0
Fiona-1.8.21/tests/test_geopackage.py000066400000000000000000000047061420023252700175040ustar00rootroot00000000000000import os
import pytest

import fiona

from .conftest import requires_gpkg

example_schema = {
    'geometry': 'Point',
    'properties': [('title', 'str')],
}

example_crs = {
    'a': 6370997,
    'lon_0': -100,
    'y_0': 0,
    'no_defs': True,
    'proj': 'laea',
    'x_0': 0,
    'units': 'm',
    'b': 6370997,
    'lat_0': 45,
}

example_features = [
    {
        "geometry": {"type": "Point", "coordinates": [0.0, 0.0]},
        "properties": {"title": "One"},
    },
    {
        "geometry": {"type": "Point", "coordinates": [1.0, 2.0]},
        "properties": {"title": "Two"},
    },
    {
        "geometry": {"type": "Point", "coordinates": [3.0, 4.0]},
        "properties": {"title": "Three"},
    },
]


@requires_gpkg
def test_read_gpkg(path_coutwildrnp_gpkg):
    """
    Implicitly tests writing gpkg as the fixture will create the data
    source on first request
    """
    with fiona.open(path_coutwildrnp_gpkg, "r") as src:
        assert len(src) == 67
        feature = next(iter(src))
        assert feature["geometry"]["type"] == "Polygon"
        assert feature["properties"]["NAME"] == "Mount Naomi Wilderness"


@requires_gpkg
def test_write_gpkg(tmpdir):
    path = str(tmpdir.join('foo.gpkg'))

    with fiona.open(path, 'w',
                    driver='GPKG',
                    schema=example_schema,
                    crs=example_crs) as dst:
        dst.writerecords(example_features)

    with fiona.open(path) as src:
        assert src.schema['geometry'] == 'Point'
        assert len(src) == 3


@requires_gpkg
def test_write_multilayer_gpkg(tmpdir):
    """
    Test that writing a second layer to an existing geopackage doesn't
    remove an existing layer from the dataset.
    """
    path = str(tmpdir.join('foo.gpkg'))

    with fiona.open(path, 'w',
                    driver='GPKG',
                    schema=example_schema,
                    layer="layer1",
                    crs=example_crs) as dst:
        dst.writerecords(example_features[0:2])

    with fiona.open(path, 'w',
                    driver='GPKG',
                    schema=example_schema,
                    layer="layer2",
                    crs=example_crs) as dst:
        dst.writerecords(example_features[2:])

    with fiona.open(path, layer="layer1") as src:
        assert src.schema['geometry'] == 'Point'
        assert len(src) == 2

    with fiona.open(path, layer="layer2") as src:
        assert src.schema['geometry'] == 'Point'
        assert len(src) == 1
Fiona-1.8.21/tests/test_integration.py000066400000000000000000000020651420023252700177350ustar00rootroot00000000000000"""Unittests to verify Fiona is functioning properly with other software."""

import six

import fiona


def test_dict_subclass(tmpdir):
    """Rasterio now has a `CRS()` class that subclasses
    `collections.UserDict()`.  Make sure we can receive it.

    `UserDict()` is a good class to test against because in Python 2 it is
    not a subclass of `collections.Mapping()`, so it provides an edge case.
    """

    class CRS(six.moves.UserDict):
        pass

    outfile = str(tmpdir.join('test_UserDict.geojson'))

    profile = {
        'crs': CRS(init='EPSG:4326'),
        'driver': 'GeoJSON',
        'schema': {
            'geometry': 'Point',
            'properties': {}
        }
    }

    with fiona.open(outfile, 'w', **profile) as dst:
        dst.write({
            'type': 'Feature',
            'properties': {},
            'geometry': {
                'type': 'Point',
                'coordinates': (10, -10)
            }
        })

    with fiona.open(outfile) as src:
        assert len(src) == 1
        assert src.crs == {'init': 'epsg:4326'}
Fiona-1.8.21/tests/test_layer.py000066400000000000000000000041551420023252700165300ustar00rootroot00000000000000import pytest

import fiona
from .test_collection import TestReading


def test_index_selection(path_coutwildrnp_shp):
    with fiona.open(path_coutwildrnp_shp, 'r', layer=0) as c:
        assert len(c) == 67


class TestFileReading(TestReading):
    @pytest.fixture(autouse=True)
    def shapefile(self, path_coutwildrnp_shp):
        self.c = fiona.open(path_coutwildrnp_shp, 'r', layer='coutwildrnp')
        yield
        self.c.close()

    def test_open_repr(self, path_coutwildrnp_shp):
        assert (
            repr(self.c) ==
            ("<open Collection '{path}:coutwildrnp', mode 'r' "
             "at {id}>".format(path=path_coutwildrnp_shp,
                               id=hex(id(self.c)))))

    def test_closed_repr(self, path_coutwildrnp_shp):
        self.c.close()
        assert (
            repr(self.c) ==
            ("<closed Collection '{path}:coutwildrnp', mode 'r' "
             "at {id}>".format(path=path_coutwildrnp_shp,
                               id=hex(id(self.c)))))

    def test_name(self):
        assert self.c.name == 'coutwildrnp'


class TestDirReading(TestReading):
    @pytest.fixture(autouse=True)
    def shapefile(self, data_dir):
        self.c = fiona.open(data_dir, "r", layer="coutwildrnp")
        yield
        self.c.close()

    def test_open_repr(self, data_dir):
        assert (
            repr(self.c) ==
            ("<open Collection '{path}:coutwildrnp', mode 'r' "
             "at {id}>".format(path=data_dir,
                               id=hex(id(self.c)))))

    def test_closed_repr(self, data_dir):
        self.c.close()
        assert (
            repr(self.c) ==
            ("<closed Collection '{path}:coutwildrnp', mode 'r' "
             "at {id}>".format(path=data_dir,
                               id=hex(id(self.c)))))

    def test_name(self):
        assert self.c.name == 'coutwildrnp'

    def test_path(self, data_dir):
        assert self.c.path == data_dir


def test_invalid_layer(path_coutwildrnp_shp):
    with pytest.raises(ValueError):
        fiona.open(path_coutwildrnp_shp, layer="foo")


def test_write_invalid_numeric_layer(path_coutwildrnp_shp, tmpdir):
    with pytest.raises(ValueError):
        fiona.open(str(tmpdir.join("test-no-iter.shp")), mode='w', layer=0)
Fiona-1.8.21/tests/test_listing.py000066400000000000000000000037431420023252700170670ustar00rootroot00000000000000"""Test listing a datasource's layers."""
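# A minimal usage sketch of the API under test, with illustrative paths
# (the expected layer names come from this suite's data directory):
#
#     import fiona
#     fiona.listlayers("tests/data")            # ['coutwildrnp', 'gre', 'test_tin']
#     fiona.listlayers("zip://tests/data/coutwildrnp.zip")   # zip scheme works too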
import logging import sys import os import pytest import fiona import fiona.ogrext from fiona.errors import DriverError, FionaDeprecationWarning def test_single_file_private(path_coutwildrnp_shp): with fiona.Env(): assert fiona.ogrext._listlayers( path_coutwildrnp_shp) == ['coutwildrnp'] def test_single_file(path_coutwildrnp_shp): assert fiona.listlayers(path_coutwildrnp_shp) == ['coutwildrnp'] def test_directory(data_dir): assert sorted(fiona.listlayers(data_dir)) == ['coutwildrnp', 'gre', 'test_tin'] def test_directory_trailing_slash(data_dir): assert sorted(fiona.listlayers(data_dir)) == ['coutwildrnp', 'gre', 'test_tin'] def test_zip_path(path_coutwildrnp_zip): assert fiona.listlayers( 'zip://{}'.format(path_coutwildrnp_zip)) == ['coutwildrnp'] def test_zip_path_arch(path_coutwildrnp_zip): vfs = 'zip://{}'.format(path_coutwildrnp_zip) with pytest.warns(FionaDeprecationWarning): assert fiona.listlayers('/coutwildrnp.shp', vfs=vfs) == ['coutwildrnp'] def test_list_not_existing(data_dir): """Test underlying Cython function correctly raises""" path = os.path.join(data_dir, "does_not_exist.geojson") with pytest.raises(DriverError): fiona.ogrext._listlayers(path) def test_invalid_path(): with pytest.raises(TypeError): fiona.listlayers(1) def test_invalid_vfs(): with pytest.raises(TypeError): fiona.listlayers("/", vfs=1) def test_invalid_path_ioerror(): with pytest.raises(DriverError): fiona.listlayers("foobar") def test_listing_file(path_coutwildrnp_json): """list layers from an open file object""" with open(path_coutwildrnp_json, "rb") as f: assert len(fiona.listlayers(f)) == 1 def test_listing_pathobj(path_coutwildrnp_json): """list layers from a Path object""" pathlib = pytest.importorskip("pathlib") assert len(fiona.listlayers(pathlib.Path(path_coutwildrnp_json))) == 1 Fiona-1.8.21/tests/test_logutils.py000066400000000000000000000036021420023252700172520ustar00rootroot00000000000000"""Tests of skipped field log message filtering""" import logging import os import fiona from fiona.logutils import LogFiltering, FieldSkipLogFilter def test_filtering(caplog): """Test that ordinary log messages pass""" logger = logging.getLogger() with LogFiltering(logger, FieldSkipLogFilter()): logger.warning("Attention!") logger.warning("Skipping field 1") logger.warning("Skipping field 2") logger.warning("Danger!") logger.warning("Skipping field 1") assert len(caplog.records) == 4 assert caplog.records[0].getMessage() == "Attention!" assert caplog.records[1].getMessage() == "Skipping field 1" assert caplog.records[2].getMessage() == "Skipping field 2" assert caplog.records[3].getMessage() == "Danger!" 
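# In short, the behaviour exercised above: FieldSkipLogFilter lets the first
# occurrence of each distinct "Skipping field ..." message through, drops
# verbatim repeats, and passes all other records unchanged.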
def test_skipping_slice(caplog, data_dir): """Collection filters out all but one warning message""" with fiona.open(os.path.join(data_dir, "issue627.geojson")) as src: results = list(src) assert len(results) == 3 assert not any(['skip_me' in f['properties'] for f in results]) assert len([rec for rec in caplog.records if rec.getMessage().startswith('Skipping')]) == 1 def test_skipping_list(caplog, data_dir): """Collection filters out all but one warning message""" with fiona.open(os.path.join(data_dir, "issue627.geojson")) as src: results = list(src) assert len(results) == 3 assert not any(['skip_me' in f['properties'] for f in results]) assert len([rec for rec in caplog.records if rec.getMessage().startswith('Skipping')]) == 1 def test_log_filter_exception(caplog): """FieldSkipLogFilter handles exceptions from log.exception().""" logger = logging.getLogger() with LogFiltering(logger, FieldSkipLogFilter()): logger.exception(ValueError("Oh no")) assert len(caplog.records) == 1 assert caplog.records[0].getMessage() == "Oh no" Fiona-1.8.21/tests/test_memoryfile.py000066400000000000000000000150151420023252700175610ustar00rootroot00000000000000"""Tests of MemoryFile and ZippedMemoryFile""" from collections import OrderedDict from io import BytesIO import os import pytest import fiona from fiona.io import MemoryFile, ZipMemoryFile from .conftest import requires_gdal2, requires_gpkg @pytest.fixture(scope='session') def profile_first_coutwildrnp_shp(path_coutwildrnp_shp): with fiona.open(path_coutwildrnp_shp) as col: return col.profile, next(iter(col)) @pytest.fixture(scope='session') def data_coutwildrnp_json(path_coutwildrnp_json): with open(path_coutwildrnp_json, 'rb') as f: return f.read() def test_memoryfile_ext(): """File extensions are handled""" assert MemoryFile(ext=".geojson").name.endswith(".geojson") def test_memoryfile_bare_ext(): """File extensions without a leading . 
are handled""" assert MemoryFile(ext="geojson").name.endswith(".geojson") def test_memoryfile_init(data_coutwildrnp_json): """In-memory GeoJSON file can be read""" with MemoryFile(data_coutwildrnp_json) as memfile: with memfile.open() as collection: assert len(collection) == 67 def test_memoryfile_incr_init(data_coutwildrnp_json): """In-memory GeoJSON file written in 2 parts can be read""" with MemoryFile() as memfile: memfile.write(data_coutwildrnp_json[:1000]) memfile.write(data_coutwildrnp_json[1000:]) with memfile.open() as collection: assert len(collection) == 67 def test_zip_memoryfile(bytes_coutwildrnp_zip): """In-memory zipped Shapefile can be read""" with ZipMemoryFile(bytes_coutwildrnp_zip) as memfile: with memfile.open('coutwildrnp.shp') as collection: assert len(collection) == 67 def test_zip_memoryfile_infer_layer_name(bytes_coutwildrnp_zip): """In-memory zipped Shapefile can be read with the default layer""" with ZipMemoryFile(bytes_coutwildrnp_zip) as memfile: with memfile.open() as collection: assert len(collection) == 67 def test_open_closed(): """Get an exception when opening a dataset on a closed MemoryFile""" memfile = MemoryFile() memfile.close() assert memfile.closed with pytest.raises(IOError): memfile.open() def test_open_closed_zip(): """Get an exception when opening a dataset on a closed ZipMemoryFile""" memfile = ZipMemoryFile() memfile.close() assert memfile.closed with pytest.raises(IOError): memfile.open() def test_write_memoryfile(profile_first_coutwildrnp_shp): """In-memory GeoJSON can be written""" profile, first = profile_first_coutwildrnp_shp profile['driver'] = 'GeoJSON' with MemoryFile() as memfile: with memfile.open(**profile) as col: col.write(first) memfile.seek(0) data = memfile.read() with MemoryFile(data) as memfile: with memfile.open() as col: assert len(col) == 1 @requires_gdal2 def test_memoryfile_write_extension(profile_first_coutwildrnp_shp): """In-memory shapefile gets an .shp extension by default""" profile, first = profile_first_coutwildrnp_shp profile['driver'] = 'ESRI Shapefile' with MemoryFile() as memfile: with memfile.open(**profile) as col: col.write(first) assert memfile.name.endswith(".shp") def test_memoryfile_open_file_or_bytes_read(path_coutwildrnp_json): """Test MemoryFile.open when file_or_bytes has a read attribute """ with open(path_coutwildrnp_json, 'rb') as f: with MemoryFile(f) as memfile: with memfile.open() as collection: assert len(collection) == 67 def test_memoryfile_bytesio(data_coutwildrnp_json): """GeoJSON file stored in BytesIO can be read""" with fiona.open(BytesIO(data_coutwildrnp_json)) as collection: assert len(collection) == 67 def test_memoryfile_fileobj(path_coutwildrnp_json): """GeoJSON file in an open file object can be read""" with open(path_coutwildrnp_json, 'rb') as f: with fiona.open(f) as collection: assert len(collection) == 67 def test_memoryfile_len(data_coutwildrnp_json): """Test MemoryFile.__len__ """ with MemoryFile() as memfile: assert len(memfile) == 0 memfile.write(data_coutwildrnp_json) assert len(memfile) == len(data_coutwildrnp_json) def test_memoryfile_tell(data_coutwildrnp_json): """Test MemoryFile.tell() """ with MemoryFile() as memfile: assert memfile.tell() == 0 memfile.write(data_coutwildrnp_json) assert memfile.tell() == len(data_coutwildrnp_json) def test_write_bytesio(profile_first_coutwildrnp_shp): """GeoJSON can be written to BytesIO""" profile, first = profile_first_coutwildrnp_shp profile['driver'] = 'GeoJSON' with BytesIO() as fout: with fiona.open(fout, 'w', **profile) as 
col: col.write(first) fout.seek(0) data = fout.read() with MemoryFile(data) as memfile: with memfile.open() as col: assert len(col) == 1 def test_append_bytesio_exception(data_coutwildrnp_json): """Append is not supported, see #1027.""" with pytest.raises(OSError): fiona.open(BytesIO(data_coutwildrnp_json), "a") def test_mapinfo_raises(): """Reported to be a crasher in #937""" driver = 'MapInfo File' schema = {'geometry': 'Point', 'properties': OrderedDict([('position', 'str')])} with BytesIO() as fout: with pytest.raises(OSError): with fiona.open(fout, "w", driver=driver, schema=schema) as collection: collection.write({"type": "Feature", "geometry": {"type": "Point", "coordinates": (0, 0)}, "properties": {"position": "x"}}) @requires_gpkg def test_read_multilayer_memoryfile(path_coutwildrnp_json, tmpdir): """Test read access to multilayer dataset in from file-like object""" with fiona.open(path_coutwildrnp_json, "r") as src: schema = src.schema features = list(src) path = os.path.join(tmpdir, "test.gpkg") with fiona.open(path, "w", driver="GPKG", schema=schema, layer="layer1") as dst: dst.writerecords(features[0:5]) with fiona.open(path, "w", driver="GPKG", schema=schema, layer="layer2") as dst: dst.writerecords(features[5:]) with open(path, "rb") as f: with fiona.open(f, layer="layer1") as src: assert src.name == "layer1" assert len(src) == 5 # Bug reported in #781 where this next section would fail with open(path, "rb") as f: with fiona.open(f, layer="layer2") as src: assert src.name == "layer2" assert len(src) == 62 Fiona-1.8.21/tests/test_multiconxn.py000066400000000000000000000073561420023252700176220ustar00rootroot00000000000000import pytest import fiona from fiona.compat import OrderedDict class TestReadAccess(object): # To check that we'll be able to get multiple 'r' connections to layers # in a single file. def test_meta(self, path_coutwildrnp_shp): with fiona.open(path_coutwildrnp_shp, "r", layer="coutwildrnp") as c: with fiona.open(path_coutwildrnp_shp, "r", layer="coutwildrnp") as c2: assert len(c) == len(c2) assert sorted(c.schema.items()) == sorted(c2.schema.items()) def test_feat(self, path_coutwildrnp_shp): with fiona.open(path_coutwildrnp_shp, "r", layer="coutwildrnp") as c: f1 = next(iter(c)) with fiona.open(path_coutwildrnp_shp, "r", layer="coutwildrnp") as c2: f2 = next(iter(c2)) assert f1 == f2 class TestReadWriteAccess(object): # To check that we'll be able to read from a file that we're # writing to. 
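    # The fixture below opens a writer, writes a single feature, and calls
    # flush() so that a second read-mode connection can see that feature
    # while the writer is still open.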

class TestReadWriteAccess(object):
    # To check that we'll be able to read from a file that we're
    # writing to.

    @pytest.fixture(autouse=True)
    def multi_write_test_shp(self, tmpdir):
        self.shapefile_path = str(tmpdir.join("multi_write_test.shp"))
        self.c = fiona.open(
            self.shapefile_path, "w",
            driver="ESRI Shapefile",
            schema={
                'geometry': 'Point',
                'properties': [('title', 'str:80'), ('date', 'date')]},
            crs={'init': "epsg:4326", 'no_defs': True},
            encoding='utf-8')
        self.f = {
            'type': 'Feature',
            'geometry': {'type': 'Point', 'coordinates': (0.0, 0.1)},
            'properties': OrderedDict([('title', 'point one'),
                                       ('date', '2012-01-29')])}
        self.c.writerecords([self.f])
        self.c.flush()
        yield
        self.c.close()

    def test_meta(self):
        c2 = fiona.open(self.shapefile_path, "r")
        assert len(self.c) == len(c2)
        assert sorted(self.c.schema.items()) == sorted(c2.schema.items())
        c2.close()

    def test_read(self):
        c2 = fiona.open(self.shapefile_path, "r")
        f2 = next(iter(c2))
        del f2['id']
        assert self.f == f2
        c2.close()

    def test_read_after_close(self):
        c2 = fiona.open(self.shapefile_path, "r")
        self.c.close()
        f2 = next(iter(c2))
        del f2['id']
        assert self.f == f2
        c2.close()


class TestLayerCreation(object):

    @pytest.fixture(autouse=True)
    def layer_creation_shp(self, tmpdir):
        self.dir = tmpdir.mkdir('layer_creation')
        self.c = fiona.open(
            str(self.dir), 'w',
            layer='write_test',
            driver='ESRI Shapefile',
            schema={
                'geometry': 'Point',
                'properties': [('title', 'str:80'), ('date', 'date')]},
            crs={'init': "epsg:4326", 'no_defs': True},
            encoding='utf-8')
        self.f = {
            'type': 'Feature',
            'geometry': {'type': 'Point', 'coordinates': (0.0, 0.1)},
            'properties': OrderedDict([('title', 'point one'),
                                       ('date', '2012-01-29')])}
        self.c.writerecords([self.f])
        self.c.flush()
        yield
        self.c.close()

    def test_meta(self):
        c2 = fiona.open(str(self.dir.join("write_test.shp")), "r")
        assert len(self.c) == len(c2)
        assert sorted(self.c.schema.items()) == sorted(c2.schema.items())
        c2.close()

    def test_read(self):
        c2 = fiona.open(str(self.dir.join("write_test.shp")), "r")
        f2 = next(iter(c2))
        del f2['id']
        assert self.f == f2
        c2.close()

    def test_read_after_close(self):
        c2 = fiona.open(str(self.dir.join("write_test.shp")), "r")
        self.c.close()
        f2 = next(iter(c2))
        del f2['id']
        assert self.f == f2
        c2.close()


Fiona-1.8.21/tests/test_non_counting_layer.py


import pytest

import fiona
from fiona.errors import FionaDeprecationWarning


@pytest.mark.usefixtures('uttc_path_gpx')
class TestNonCountingLayer(object):

    def setup(self):
        self.c = fiona.open(self.path_gpx, "r", layer="track_points")

    def teardown(self):
        self.c.close()

    def test_len_fail(self):
        with pytest.raises(TypeError):
            len(self.c)

    def test_list(self):
        features = list(self.c)
        assert len(features) == 19

    def test_getitem(self):
        self.c[2]

    def test_fail_getitem_negative_index(self):
        with pytest.raises(IndexError):
            self.c[-1]

    def test_slice(self):
        with pytest.warns(FionaDeprecationWarning):
            features = self.c[2:5]
            assert len(features) == 3

    def test_warn_slice_negative_index(self):
        with pytest.warns((FionaDeprecationWarning, RuntimeWarning)):
            self.c[2:-4]
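
# ---------------------------------------------------------------------------
# Illustrative sketch (not a test): when a driver cannot report a feature
# count, len() raises TypeError (see test_len_fail above), but a full scan
# still works. The helper name is ours; it assumes the imports above.
def _count_features_sketch(collection):
    try:
        return len(collection)
    except TypeError:
        # fall back to iterating the whole layer
        return sum(1 for _ in collection)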

Fiona-1.8.21/tests/test_open.py


"""Tests of file opening"""

import io
import os

import pytest

import fiona
from fiona._crs import crs_to_wkt
from fiona.errors import DriverError

from .conftest import requires_gdal21


def test_open_shp(path_coutwildrnp_shp):
    """Open a shapefile"""
    assert fiona.open(path_coutwildrnp_shp)


def test_open_filename_with_exclamation(data_dir):
    path = os.path.relpath(os.path.join(data_dir, "!test.geojson"))
    assert os.path.exists(path), "Missing test data"
    assert fiona.open(path), "Failed to open !test.geojson"


@requires_gdal21
@pytest.mark.xfail(raises=DriverError)
def test_write_memfile_crs_wkt():
    example_schema = {
        "geometry": "Point",
        "properties": [("title", "str")],
    }

    example_features = [
        {
            "geometry": {"type": "Point", "coordinates": [0.0, 0.0]},
            "properties": {"title": "One"},
        },
        {
            "geometry": {"type": "Point", "coordinates": [1.0, 2.0]},
            "properties": {"title": "Two"},
        },
        {
            "geometry": {"type": "Point", "coordinates": [3.0, 4.0]},
            "properties": {"title": "Three"},
        },
    ]

    with io.BytesIO() as fd:
        with fiona.open(
            fd,
            "w",
            driver="GPKG",
            schema=example_schema,
            crs_wkt=crs_to_wkt("EPSG:32611"),
        ) as dst:
            dst.writerecords(example_features)

        fd.seek(0)
        with fiona.open(fd) as src:
            assert src.driver == "GPKG"
            assert src.crs == {"init": "epsg:32611"}


Fiona-1.8.21/tests/test_profile.py


import os
import re

import fiona

from .conftest import WGS84PATTERN


def test_profile(path_coutwildrnp_shp):
    with fiona.open(path_coutwildrnp_shp) as src:
        assert re.match(WGS84PATTERN, src.crs_wkt)


def test_profile_creation_wkt(tmpdir, path_coutwildrnp_shp):
    outfilename = str(tmpdir.join("test.shp"))
    with fiona.open(path_coutwildrnp_shp) as src:
        profile = src.meta
        profile['crs'] = 'bogus'
        with fiona.open(outfilename, 'w', **profile) as dst:
            assert dst.crs == {'init': 'epsg:4326'}
            assert re.match(WGS84PATTERN, dst.crs_wkt)
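
# ---------------------------------------------------------------------------
# Illustrative sketch (not a test): src.meta (the "profile") bundles driver,
# schema, and CRS information, so copying a dataset is one dict-splat away,
# as test_profile_creation_wkt above relies on. The helper name is ours.
def _copy_dataset_sketch(src_path, dst_path):
    with fiona.open(src_path) as src:
        profile = src.meta
        with fiona.open(dst_path, 'w', **profile) as dst:
            dst.writerecords(src)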

Fiona-1.8.21/tests/test_props.py


import json
import os.path
import tempfile

from six import text_type

import fiona
from fiona import prop_type, prop_width
from fiona.rfc3339 import FionaDateType


def test_width_str():
    assert prop_width('str:254') == 254
    assert prop_width('str') == 80


def test_width_other():
    assert prop_width('int') == None
    assert prop_width('float') == None
    assert prop_width('date') == None


def test_types():
    assert prop_type('str:254') == text_type
    assert prop_type('str') == text_type
    assert isinstance(0, prop_type('int'))
    assert isinstance(0.0, prop_type('float'))
    assert prop_type('date') == FionaDateType


def test_read_json_object_properties():
    """JSON object properties are properly serialized"""
    data = """
{
  "type": "FeatureCollection",
  "features": [
    {
      "geometry": {
        "type": "Polygon",
        "coordinates": [
          [
            [ 87.33588, 43.53139 ],
            [ 87.33588, 45.66894 ],
            [ 90.27542, 45.66894 ],
            [ 90.27542, 43.53139 ],
            [ 87.33588, 43.53139 ]
          ]
        ]
      },
      "type": "Feature",
      "properties": {
        "upperLeftCoordinate": {
          "latitude": 45.66894,
          "longitude": 87.91166
        },
        "tricky": "{gotcha"
      }
    }
  ]
}
"""
    tmpdir = tempfile.mkdtemp()
    filename = os.path.join(tmpdir, 'test.json')

    with open(filename, 'w') as f:
        f.write(data)

    with fiona.open(filename) as src:
        ftr = next(iter(src))
        props = ftr['properties']
        assert props['upperLeftCoordinate']['latitude'] == 45.66894
        assert props['upperLeftCoordinate']['longitude'] == 87.91166
        assert props['tricky'] == "{gotcha"


def test_write_json_object_properties():
    """Python object properties are properly serialized"""
    data = """
{
  "type": "FeatureCollection",
  "features": [
    {
      "geometry": {
        "type": "Polygon",
        "coordinates": [
          [
            [ 87.33588, 43.53139 ],
            [ 87.33588, 45.66894 ],
            [ 90.27542, 45.66894 ],
            [ 90.27542, 43.53139 ],
            [ 87.33588, 43.53139 ]
          ]
        ]
      },
      "type": "Feature",
      "properties": {
        "upperLeftCoordinate": {
          "latitude": 45.66894,
          "longitude": 87.91166
        },
        "tricky": "{gotcha"
      }
    }
  ]
}
"""
    data = json.loads(data)['features'][0]
    tmpdir = tempfile.mkdtemp()
    filename = os.path.join(tmpdir, 'test.json')

    with fiona.open(
            filename, 'w',
            driver='GeoJSON',
            schema={
                'geometry': 'Polygon',
                'properties': {'upperLeftCoordinate': 'str', 'tricky': 'str'}}
    ) as dst:
        dst.write(data)

    with fiona.open(filename) as src:
        ftr = next(iter(src))
        props = ftr['properties']
        assert props['upperLeftCoordinate']['latitude'] == 45.66894
        assert props['upperLeftCoordinate']['longitude'] == 87.91166
        assert props['tricky'] == "{gotcha"


def test_json_prop_decode_non_geojson_driver():
    feature = {
        "type": "Feature",
        "properties": {
            "ulc": {
                "latitude": 45.66894,
                "longitude": 87.91166
            },
            "tricky": "{gotcha"
        },
        "geometry": {
            "type": "Point",
            "coordinates": [10, 15]
        }
    }

    meta = {
        'crs': 'EPSG:4326',
        'driver': 'ESRI Shapefile',
        'schema': {
            'geometry': 'Point',
            'properties': {
                'ulc': 'str:255',
                'tricky': 'str:255'
            }
        }
    }

    tmpdir = tempfile.mkdtemp()
    filename = os.path.join(tmpdir, 'test.json')
    with fiona.open(filename, 'w', **meta) as dst:
        dst.write(feature)

    with fiona.open(filename) as src:
        actual = next(iter(src))

    assert isinstance(actual['properties']['ulc'], text_type)
    a = json.loads(actual['properties']['ulc'])
    e = json.loads(json.dumps(feature['properties']['ulc']))
    assert e == a
    assert actual['properties']['tricky'].startswith('{')


Fiona-1.8.21/tests/test_read_drivers.py


import pytest

import fiona
from fiona.errors import FionaValueError


def test_read_fail(path_coutwildrnp_shp):
    with pytest.raises(FionaValueError):
        fiona.open(path_coutwildrnp_shp, driver='GeoJSON')
    with pytest.raises(FionaValueError):
        fiona.open(path_coutwildrnp_shp, enabled_drivers=['GeoJSON'])


def test_read(path_coutwildrnp_shp):
    with fiona.open(path_coutwildrnp_shp, driver='ESRI Shapefile') as src:
        assert src.driver == 'ESRI Shapefile'
    with fiona.open(
            path_coutwildrnp_shp,
            enabled_drivers=['GeoJSON', 'ESRI Shapefile']) as src:
        assert src.driver == 'ESRI Shapefile'
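
# ---------------------------------------------------------------------------
# Illustrative sketch (not a test): enabled_drivers narrows GDAL's format
# probing to an ordered allow-list, which is how the tests above force or
# forbid a driver. The helper name is ours; it raises FionaValueError when
# none of the listed drivers can open the path.
def _open_as_sketch(path, *drivers):
    return fiona.open(path, enabled_drivers=list(drivers))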
str(tmpdir.join("does_not_exist.shp")) assert not os.path.exists(filename) with pytest.raises(IOError): fiona.remove(filename) @requires_gpkg def test_remove_layer(tmpdir): filename = str(tmpdir.join("a_filename.gpkg")) create_sample_data(filename, "GPKG", layer="layer1") create_sample_data(filename, "GPKG", layer="layer2") create_sample_data(filename, "GPKG", layer="layer3") create_sample_data(filename, "GPKG", layer="layer4") assert fiona.listlayers(filename) == ["layer1", "layer2", "layer3", "layer4"] # remove by index fiona.remove(filename, layer=2) assert fiona.listlayers(filename) == ["layer1", "layer2", "layer4"] # remove by name fiona.remove(filename, layer="layer2") assert fiona.listlayers(filename) == ["layer1", "layer4"] # remove by negative index fiona.remove(filename, layer=-1) assert fiona.listlayers(filename) == ["layer1"] # invalid layer name with pytest.raises(ValueError): fiona.remove(filename, layer="invalid_layer_name") # invalid layer index with pytest.raises(DatasetDeleteError): fiona.remove(filename, layer=999) def test_remove_layer_shapefile(tmpdir): """Removal of layer in shapefile actually deletes the datasource""" filename = str(tmpdir.join("a_filename.shp")) create_sample_data(filename, "ESRI Shapefile") fiona.remove(filename, layer=0) assert not os.path.exists(filename) def test_remove_layer_geojson(tmpdir): """Removal of layers is not supported by GeoJSON driver The reason for failure is slightly different between GDAL 2.2+ and < 2.2. With < 2.2 the datasource will fail to open in write mode (IOError), while with 2.2+ the datasource will open but the removal operation will fail (not supported). """ filename = str(tmpdir.join("a_filename.geojson")) create_sample_data(filename, "GeoJSON") with pytest.raises((RuntimeError, IOError)): fiona.remove(filename, layer=0) assert os.path.exists(filename) Fiona-1.8.21/tests/test_revolvingdoor.py000066400000000000000000000006221420023252700203060ustar00rootroot00000000000000# Test of opening and closing and opening import fiona def test_write_revolving_door(tmpdir, path_coutwildrnp_shp): with fiona.open(path_coutwildrnp_shp) as src: meta = src.meta features = list(src) shpname = str(tmpdir.join('foo.shp')) with fiona.open(shpname, 'w', **meta) as dst: dst.writerecords(features) with fiona.open(shpname) as src: pass Fiona-1.8.21/tests/test_rfc3339.py000066400000000000000000000036561420023252700165150ustar00rootroot00000000000000"""Tests for Fiona's RFC 3339 support.""" import re import pytest from fiona.rfc3339 import parse_date, parse_datetime, parse_time from fiona.rfc3339 import group_accessor, pattern_date class TestDateParse(object): def test_yyyymmdd(self): assert parse_date("2012-01-29") == (2012, 1, 29, 0, 0, 0, 0.0, None) def test_error(self): with pytest.raises(ValueError): parse_date("xxx") class TestTimeParse(object): def test_hhmmss(self): assert parse_time("10:11:12") == (0, 0, 0, 10, 11, 12, 0.0, None) def test_hhmm(self): assert parse_time("10:11") == (0, 0, 0, 10, 11, 0, 0.0, None) def test_hhmmssff(self): assert parse_time("10:11:12.42") == (0, 0, 0, 10, 11, 12, 0.42*1000000, None) def test_hhmmssz(self): assert parse_time("10:11:12Z") == (0, 0, 0, 10, 11, 12, 0.0, None) def test_hhmmssoff(self): assert parse_time("10:11:12-01:30") == (0, 0, 0, 10, 11, 12, 0.0, -90) def test_hhmmssoff2(self): assert parse_time("10:11:12+01:30") == (0, 0, 0, 10, 11, 12, 0.0, 90) def test_error(self): with pytest.raises(ValueError): parse_time("xxx") class TestDatetimeParse(object): def test_yyyymmdd(self): assert ( 
parse_datetime("2012-01-29T10:11:12") == (2012, 1, 29, 10, 11, 12, 0.0, None)) def test_yyyymmddTZ(self): assert ( parse_datetime("2012-01-29T10:11:12+01:30") == (2012, 1, 29, 10, 11, 12, 0.0, 90)) def test_yyyymmddTZ2(self): assert ( parse_datetime("2012-01-29T10:11:12-01:30") == (2012, 1, 29, 10, 11, 12, 0.0, -90)) def test_error(self): with pytest.raises(ValueError): parse_datetime("xxx") def test_group_accessor_indexerror(): match = re.search(pattern_date, '2012-01-29') g = group_accessor(match) assert g.group(-1) == 0 assert g.group(6) == 0 Fiona-1.8.21/tests/test_rfc64_tin.py000066400000000000000000000046041420023252700172110ustar00rootroot00000000000000"""Tests of features related to GDAL RFC 64 See https://trac.osgeo.org/gdal/wiki/rfc64_triangle_polyhedralsurface_tin. """ import fiona from .conftest import requires_gdal22 def test_tin_shp(path_test_tin_shp): """Convert TIN to MultiPolygon""" with fiona.open(path_test_tin_shp) as col: assert col.schema['geometry'] == 'Unknown' features = list(col) assert len(features) == 1 assert features[0]['geometry']['type'] == 'MultiPolygon' assert features[0]['geometry']['coordinates'] == [[[(0.0, 0.0, 0.0), (0.0, 0.0, 1.0), (0.0, 1.0, 0.0), (0.0, 0.0, 0.0)]], [[(0.0, 0.0, 0.0), (0.0, 1.0, 0.0), (1.0, 1.0, 0.0), (0.0, 0.0, 0.0)]]] @requires_gdal22 def test_tin_csv(path_test_tin_csv): """Convert TIN to MultiPolygon and Triangle to Polygon""" with fiona.open(path_test_tin_csv) as col: assert col.schema['geometry'] == 'Unknown' features = list(col) assert len(features) == 2 assert features[0]['geometry']['type'] == 'MultiPolygon' assert features[0]['geometry']['coordinates'] == [[[(0.0, 0.0, 0.0), (0.0, 0.0, 1.0), (0.0, 1.0, 0.0), (0.0, 0.0, 0.0)]], [[(0.0, 0.0, 0.0), (0.0, 1.0, 0.0), (1.0, 1.0, 0.0), (0.0, 0.0, 0.0)]]] assert features[1]['geometry']['type'] == 'Polygon' assert features[1]['geometry']['coordinates'] == [[(0.0, 0.0, 0.0), (0.0, 1.0, 0.0), (1.0, 1.0, 0.0), (0.0, 0.0, 0.0)]] Fiona-1.8.21/tests/test_schema.py000066400000000000000000000310571420023252700166550ustar00rootroot00000000000000from collections import OrderedDict import fiona from fiona.errors import SchemaError, UnsupportedGeometryTypeError, \ DriverSupportError from fiona.schema import FIELD_TYPES, normalize_field_type import os import tempfile from .conftest import get_temp_filename from fiona.drvsupport import driver_mode_mingdal from fiona.env import GDALVersion import pytest from .conftest import requires_only_gdal1, requires_gdal2 def test_schema_ordering_items(tmpdir): name = str(tmpdir.join('test_scheme.shp')) items = [('title', 'str:80'), ('date', 'date')] with fiona.open(name, 'w', driver="ESRI Shapefile", schema={ 'geometry': 'LineString', 'properties': items}) as c: assert list(c.schema['properties'].items()) == items with fiona.open(name) as c: assert list(c.schema['properties'].items()) == items def test_shapefile_schema(tmpdir): name = str(tmpdir.join('test_schema.shp')) items = sorted({ 'AWATER10': 'float', 'CLASSFP10': 'str', 'ZipCodeType': 'str', 'EstimatedPopulation': 'float', 'LocationType': 'str', 'ALAND10': 'float', 'TotalWages': 'float', 'FUNCSTAT10': 'str', 'Long': 'float', 'City': 'str', 'TaxReturnsFiled': 'float', 'State': 'str', 'Location': 'str', 'GSrchCnt': 'float', 'INTPTLAT10': 'str', 'Lat': 'float', 'MTFCC10': 'str', 'Decommisioned': 'str', 'GEOID10': 'str', 'INTPTLON10': 'str'}.items()) with fiona.open(name, 'w', driver="ESRI Shapefile", schema={'geometry': 'Polygon', 'properties': items}) as c: assert list(c.schema['properties'].items()) == 

Fiona-1.8.21/tests/test_rfc64_tin.py


"""Tests of features related to GDAL RFC 64

See https://trac.osgeo.org/gdal/wiki/rfc64_triangle_polyhedralsurface_tin.
"""

import fiona

from .conftest import requires_gdal22


def test_tin_shp(path_test_tin_shp):
    """Convert TIN to MultiPolygon"""
    with fiona.open(path_test_tin_shp) as col:
        assert col.schema['geometry'] == 'Unknown'
        features = list(col)
        assert len(features) == 1
        assert features[0]['geometry']['type'] == 'MultiPolygon'
        assert features[0]['geometry']['coordinates'] == [
            [[(0.0, 0.0, 0.0), (0.0, 0.0, 1.0), (0.0, 1.0, 0.0), (0.0, 0.0, 0.0)]],
            [[(0.0, 0.0, 0.0), (0.0, 1.0, 0.0), (1.0, 1.0, 0.0), (0.0, 0.0, 0.0)]]]


@requires_gdal22
def test_tin_csv(path_test_tin_csv):
    """Convert TIN to MultiPolygon and Triangle to Polygon"""
    with fiona.open(path_test_tin_csv) as col:
        assert col.schema['geometry'] == 'Unknown'
        features = list(col)
        assert len(features) == 2
        assert features[0]['geometry']['type'] == 'MultiPolygon'
        assert features[0]['geometry']['coordinates'] == [
            [[(0.0, 0.0, 0.0), (0.0, 0.0, 1.0), (0.0, 1.0, 0.0), (0.0, 0.0, 0.0)]],
            [[(0.0, 0.0, 0.0), (0.0, 1.0, 0.0), (1.0, 1.0, 0.0), (0.0, 0.0, 0.0)]]]
        assert features[1]['geometry']['type'] == 'Polygon'
        assert features[1]['geometry']['coordinates'] == [
            [(0.0, 0.0, 0.0), (0.0, 1.0, 0.0), (1.0, 1.0, 0.0), (0.0, 0.0, 0.0)]]
""" types = set(FIELD_TYPES) types.remove(None) return list(sorted(types)) + [None] @pytest.mark.parametrize('x', generate_field_types()) def test_normalize_std(x): assert normalize_field_type(x) == x def test_normalize_error(): with pytest.raises(SchemaError): assert normalize_field_type('thingy') @requires_only_gdal1 @pytest.mark.parametrize('field_type', ['time', 'datetime']) def test_check_schema_driver_support_shp(tmpdir, field_type): with pytest.raises(DriverSupportError): name = str(tmpdir.join('test_scheme.shp')) items = [('field1', field_type)] with fiona.open(name, 'w', driver="ESRI Shapefile", schema={ 'geometry': 'LineString', 'properties': items}) as c: pass @requires_only_gdal1 def test_check_schema_driver_support_gpkg(tmpdir): with pytest.raises(DriverSupportError): name = str(tmpdir.join('test_scheme.gpkg')) items = [('field1', 'time')] with fiona.open(name, 'w', driver="GPKG", schema={ 'geometry': 'LineString', 'properties': items}) as c: pass @pytest.mark.parametrize('driver', ['GPKG', 'GeoJSON']) def test_geometry_only_schema_write(tmpdir, driver): schema = { "geometry": "Polygon", # No properties defined here. } record = {'geometry': {'type': 'Polygon', 'coordinates': [[(0.0, 0.0), (1.0, 0.0), (1.0, 1.0), (1.0, 0.0), (0.0, 0.0)]]}} path = str(tmpdir.join(get_temp_filename(driver))) with fiona.open(path, mode='w', driver=driver, schema=schema) as c: c.write(record) with fiona.open(path, mode='r', driver=driver) as c: data = [f for f in c] assert len(data) == 1 assert len(data[0].get('properties', {})) == 0 assert data[0]['geometry'] == record['geometry'] @pytest.mark.parametrize('driver', ['GPKG', 'GeoJSON']) def test_geometry_only_schema_update(tmpdir, driver): # Guard unsupported drivers if driver in driver_mode_mingdal['a'] and GDALVersion.runtime() < GDALVersion( *driver_mode_mingdal['a'][driver][:2]): return schema = { "geometry": "Polygon", # No properties defined here. } record1 = { 'geometry': {'type': 'Polygon', 'coordinates': [[(0.0, 0.0), (1.0, 0.0), (1.0, 1.0), (1.0, 0.0), (0.0, 0.0)]]}} record2 = { 'geometry': {'type': 'Polygon', 'coordinates': [[(0.0, 0.0), (2.0, 0.0), (2.0, 2.0), (2.0, 0.0), (0.0, 0.0)]]}} path = str(tmpdir.join(get_temp_filename(driver))) # Create file with fiona.open(path, mode='w', driver=driver, schema=schema) as c: c.write(record1) # Append record with fiona.open(path, mode='a', driver=driver) as c: c.write(record2) with fiona.open(path, mode='r', driver=driver) as c: data = [f for f in c] assert len(data) == 2 for f in data: assert len(f.get('properties', {})) == 0 assert data[0]['geometry'] == record1['geometry'] assert data[1]['geometry'] == record2['geometry'] @pytest.mark.parametrize('driver', ['GPKG', 'GeoJSON']) def test_property_only_schema_write(tmpdir, driver): schema = { # No geometry defined here. 
"properties": {'prop1': 'str'} } record1 = {'properties': {'prop1': 'one'}} path = str(tmpdir.join(get_temp_filename(driver))) with fiona.open(path, mode='w', driver=driver, schema=schema) as c: c.write(record1) with fiona.open(path, mode='r', driver=driver) as c: data = [f for f in c] assert len(data) == 1 assert len(data[0].get('properties', {})) == 1 assert 'prop1' in data[0]['properties'] and data[0]['properties']['prop1'] == 'one' for f in data: assert 'geometry' not in f or f['geometry'] is None @pytest.mark.parametrize('driver', ['GPKG', 'GeoJSON']) def test_property_only_schema_update(tmpdir, driver): # Guard unsupported drivers if driver in driver_mode_mingdal['a'] and GDALVersion.runtime() < GDALVersion( *driver_mode_mingdal['a'][driver][:2]): return schema = { # No geometry defined here. "properties": {'prop1': 'str'} } record1 = {'properties': {'prop1': 'one'}} record2 = {'properties': {'prop1': 'two'}} path = str(tmpdir.join(get_temp_filename(driver))) # Create file with fiona.open(path, mode='w', driver=driver, schema=schema) as c: c.write(record1) # Append record with fiona.open(path, mode='a', driver=driver) as c: c.write(record2) with fiona.open(path, mode='r', driver=driver) as c: data = [f for f in c] assert len(data) == 2 for f in data: assert len(f.get('properties', {})) == 1 assert 'geometry' not in f or f['geometry'] is None assert 'prop1' in data[0]['properties'] and data[0]['properties']['prop1'] == 'one' assert 'prop1' in data[1]['properties'] and data[1]['properties']['prop1'] == 'two' def test_schema_default_fields_wrong_type(tmpdir): """ Test for SchemaError if a default field is specified with a different type""" name = str(tmpdir.join('test.gpx')) schema = {'properties': OrderedDict([('ele', 'str'), ('time', 'datetime')]), 'geometry': 'Point'} with pytest.raises(SchemaError): with fiona.open(name, 'w', driver="GPX", schema=schema) as c: pass Fiona-1.8.21/tests/test_schema_geom.py000066400000000000000000000151251420023252700176620ustar00rootroot00000000000000""" Tests related to the validation of feature geometry types against the schema. 
""" import fiona import pytest from fiona.errors import GeometryTypeValidationError, UnsupportedGeometryTypeError @pytest.fixture def filename_shp(tmpdir): return str(tmpdir.join("example.shp")) @pytest.fixture def filename_json(tmpdir): return str(tmpdir.join("example.json")) properties = {"name": "str"} PROPERTIES = {"name": "example"} POINT = {"type": "Point", "coordinates": (1.0, 2.0)} LINESTRING = {"type": "LineString", "coordinates": [(1.0, 2.0), (3.0, 4.0)]} POLYGON = {"type": "Polygon", "coordinates": [[(0.0, 0.0), (1.0, 1.0), (0.0, 0.1)]]} MULTILINESTRING = {"type": "MultiLineString", "coordinates": [[(0.0, 0.0), (1.0, 1.0)], [(1.0, 2.0), (3.0, 4.0)]]} GEOMETRYCOLLECTION = {"type": "GeometryCollection", "geometries": [POINT, LINESTRING, POLYGON]} INVALID = {"type": "InvalidType", "coordinates": (42.0, 43.0)} POINT_3D = {"type": "Point", "coordinates": (1.0, 2.0, 3.0)} def write_point(collection): feature = {"geometry": POINT, "properties": PROPERTIES} collection.write(feature) def write_linestring(collection): feature = {"geometry": LINESTRING, "properties": PROPERTIES} collection.write(feature) def write_polygon(collection): feature = {"geometry": POLYGON, "properties": PROPERTIES} collection.write(feature) def write_invalid(collection): feature = {"geometry": INVALID, "properties": PROPERTIES} collection.write(feature) def write_multilinestring(collection): feature = {"geometry": MULTILINESTRING, "properties": PROPERTIES} collection.write(feature) def write_point_3d(collection): feature = {"geometry": POINT_3D, "properties": PROPERTIES} collection.write(feature) def write_geometrycollection(collection): feature = {"geometry": GEOMETRYCOLLECTION, "properties": PROPERTIES} collection.write(feature) def write_null(collection): feature = {"geometry": None, "properties": PROPERTIES} collection.write(feature) def test_point(filename_shp): schema = {"geometry": "Point", "properties": properties} with fiona.open(filename_shp, "w", driver="ESRI Shapefile", schema=schema) as collection: write_point(collection) write_point_3d(collection) write_null(collection) with pytest.raises(GeometryTypeValidationError): write_linestring(collection) with pytest.raises(GeometryTypeValidationError): write_invalid(collection) def test_multi_type(filename_json): schema = {"geometry": ("Point", "LineString"), "properties": properties} with fiona.open(filename_json, "w", driver="GeoJSON", schema=schema) as collection: write_point(collection) write_linestring(collection) write_null(collection) with pytest.raises(GeometryTypeValidationError): write_polygon(collection) with pytest.raises(GeometryTypeValidationError): write_invalid(collection) def test_unknown(filename_json): """Reading and writing layers with "Unknown" (i.e. 
any) geometry type""" # write a layer with a mixture of geometry types schema = {"geometry": "Unknown", "properties": properties} with fiona.open(filename_json, "w", driver="GeoJSON", schema=schema) as collection: write_point(collection) write_linestring(collection) write_polygon(collection) write_geometrycollection(collection) write_null(collection) with pytest.raises(GeometryTypeValidationError): write_invalid(collection) # copy the features to a new layer, reusing the layers metadata with fiona.open(filename_json, "r", driver="GeoJSON") as src: filename_dst = filename_json.replace(".json", "_v2.json") assert src.schema["geometry"] == "Unknown" with fiona.open(filename_dst, "w", **src.meta) as dst: dst.writerecords(src) def test_any(filename_json): schema = {"geometry": "Any", "properties": properties} with fiona.open(filename_json, "w", driver="GeoJSON", schema=schema) as collection: write_point(collection) write_linestring(collection) write_polygon(collection) write_geometrycollection(collection) write_null(collection) with pytest.raises(GeometryTypeValidationError): write_invalid(collection) def test_broken(filename_json): schema = {"geometry": "NOT_VALID", "properties": properties} with pytest.raises(UnsupportedGeometryTypeError): with fiona.open(filename_json, "w", driver="GeoJSON", schema=schema): pass def test_broken_list(filename_json): schema = {"geometry": ("Point", "LineString", "NOT_VALID"), "properties": properties} with pytest.raises(UnsupportedGeometryTypeError): collection = fiona.open(filename_json, "w", driver="GeoJSON", schema=schema) def test_invalid_schema(filename_shp): """Features match schema but geometries not supported by driver""" schema = {"geometry": ("Point", "LineString"), "properties": properties} with fiona.open(filename_shp, "w", driver="ESRI Shapefile", schema=schema) as collection: write_linestring(collection) with pytest.raises(RuntimeError): # ESRI Shapefile can only store a single geometry type write_point(collection) def test_esri_multi_geom(filename_shp): """ESRI Shapefile doesn't differentiate between LineString/MultiLineString""" schema = {"geometry": "LineString", "properties": properties} with fiona.open(filename_shp, "w", driver="ESRI Shapefile", schema=schema) as collection: write_linestring(collection) write_multilinestring(collection) with pytest.raises(GeometryTypeValidationError): write_point(collection) def test_3d_schema_ignored(filename_json): schema = {"geometry": "3D Point", "properties": properties} with fiona.open(filename_json, "w", driver="GeoJSON", schema=schema) as collection: write_point(collection) write_point_3d(collection) def test_geometrycollection_schema(filename_json): schema = {"geometry": "GeometryCollection", "properties": properties} with fiona.open(filename_json, "w", driver="GeoJSON", schema=schema) as collection: write_geometrycollection(collection) def test_none_schema(filename_json): schema = {"geometry": None, "properties": properties} with fiona.open(filename_json, "w", driver="GeoJSON", schema=schema) as collection: write_null(collection) with pytest.raises(GeometryTypeValidationError): write_point(collection) with pytest.raises(GeometryTypeValidationError): write_linestring(collection) Fiona-1.8.21/tests/test_session.py000066400000000000000000000003611420023252700170720ustar00rootroot00000000000000"""Tests of the ogrext.Session class""" import fiona def test_get(path_coutwildrnp_shp): with fiona.open(path_coutwildrnp_shp) as col: feat3 = col.get(2) assert feat3['properties']['NAME'] == 'Mount Zirkel 

Fiona-1.8.21/tests/test_slice.py


"""Note well: collection slicing is deprecated!
"""
import tempfile
import shutil
import os
from collections import OrderedDict

import pytest

import fiona
from fiona.env import GDALVersion
from fiona.errors import FionaDeprecationWarning
from fiona.drvsupport import supported_drivers, _driver_supports_mode

from .conftest import get_temp_filename

gdal_version = GDALVersion.runtime()


def test_collection_get(path_coutwildrnp_shp):
    with fiona.open(path_coutwildrnp_shp) as src:
        result = src[5]
        assert result['id'] == '5'


def test_collection_slice(path_coutwildrnp_shp):
    with pytest.warns(FionaDeprecationWarning), fiona.open(path_coutwildrnp_shp) as src:
        results = src[:5]
        assert isinstance(results, list)
        assert len(results) == 5
        assert results[4]['id'] == '4'


def test_collection_iterator_slice(path_coutwildrnp_shp):
    with fiona.open(path_coutwildrnp_shp) as src:
        results = list(src.items(5))
        assert len(results) == 5
        k, v = results[4]
        assert k == 4
        assert v['id'] == '4'


def test_collection_iterator_next(path_coutwildrnp_shp):
    with fiona.open(path_coutwildrnp_shp) as src:
        k, v = next(src.items(5, None))
        assert k == 5
        assert v['id'] == '5'


@pytest.fixture(scope="module",
                params=[driver for driver in supported_drivers
                        if _driver_supports_mode(driver, 'w')
                        and driver not in {'DGN', 'MapInfo File',
                                           'GPSTrackMaker', 'GPX', 'BNA',
                                           'DXF'}])
def slice_dataset_path(request):
    """Create temporary datasets for test_collection_iterator_items_slice()"""
    driver = request.param
    min_id = 0
    max_id = 9

    def get_schema(driver):
        special_schemas = {
            'CSV': {'geometry': None,
                    'properties': OrderedDict([('position', 'int')])}}
        return special_schemas.get(
            driver,
            {'geometry': 'Point',
             'properties': OrderedDict([('position', 'int')])})

    def get_records(driver, range):
        special_records1 = {
            'CSV': [{'geometry': None, 'properties': {'position': i}}
                    for i in range],
            'PCIDSK': [{'geometry': {'type': 'Point',
                                     'coordinates': (0.0, float(i), 0.0)},
                        'properties': {'position': i}} for i in range]
        }
        return special_records1.get(
            driver,
            [{'geometry': {'type': 'Point', 'coordinates': (0.0, float(i))},
              'properties': {'position': i}} for i in range])

    schema = get_schema(driver)
    records = get_records(driver, range(min_id, max_id + 1))

    create_kwargs = {}
    if driver == 'FlatGeobuf':
        create_kwargs['SPATIAL_INDEX'] = False

    tmpdir = tempfile.mkdtemp()
    path = os.path.join(tmpdir, get_temp_filename(driver))

    with fiona.open(path, 'w',
                    driver=driver,
                    schema=schema,
                    **create_kwargs) as c:
        c.writerecords(records)
    yield path
    shutil.rmtree(tmpdir)


@pytest.mark.parametrize("args", [
    (0, 5, None), (1, 5, None), (-5, None, None), (-5, -1, None),
    (0, None, None), (5, None, None), (8, None, None), (9, None, None),
    (10, None, None), (0, 5, 2), (0, 5, 2), (1, 5, 2), (-5, None, 2),
    (-5, -1, 2), (0, None, 2), (0, 8, 2), (0, 9, 2), (0, 10, 2),
    (1, 8, 2), (1, 9, 2), (1, 10, 2), (1, None, 2), (5, None, 2),
    (5, None, -1), (5, None, -2), (5, None, None), (4, None, -2),
    (-1, -5, -1), (-5, None, -1), (0, 5, 1), (5, 15, 1), (15, 30, 1),
    (5, 0, -1), (15, 5, -1), (30, 15, -1), (0, 5, 2), (5, 15, 2),
    (15, 30, 2), (5, 0, -2), (15, 5, -2), (30, 15, -2)
])
@pytest.mark.filterwarnings('ignore:.*OLC_FASTFEATURECOUNT*')
@pytest.mark.filterwarnings('ignore:.*OLCFastSetNextByIndex*')
def test_collection_iterator_items_slice(slice_dataset_path, args):
    """Test if c.items(start, stop, step) returns the correct features."""
    start, stop, step = args
    min_id = 0
    max_id = 9

    positions = list(range(min_id, max_id + 1))[start:stop:step]

    with fiona.open(slice_dataset_path, 'r') as c:
        items = list(c.items(start, stop, step))
        assert len(items) == len(positions)

        record_positions = [int(item[1]['properties']['position'])
                            for item in items]
        for expected_position, record_position in zip(positions,
                                                      record_positions):
            assert expected_position == record_position


def test_collection_iterator_keys_next(path_coutwildrnp_shp):
    with fiona.open(path_coutwildrnp_shp) as src:
        k = next(src.keys(5, None))
        assert k == 5
""" start, stop, step = args min_id = 0 max_id = 9 positions = list(range(min_id, max_id + 1))[start:stop:step] with fiona.open(slice_dataset_path, 'r') as c: items = list(c.items(start, stop, step)) assert len(items) == len(positions) record_positions = [int(item[1]['properties']['position']) for item in items] for expected_position, record_position in zip(positions, record_positions): assert expected_position == record_position def test_collection_iterator_keys_next(path_coutwildrnp_shp): with fiona.open(path_coutwildrnp_shp) as src: k = next(src.keys(5, None)) assert k == 5 Fiona-1.8.21/tests/test_subtypes.py000066400000000000000000000027541420023252700172750ustar00rootroot00000000000000import fiona import six def test_read_bool_subtype(tmpdir): test_data = """{"type": "FeatureCollection", "features": [{"type": "Feature", "properties": {"bool": true, "not_bool": 1, "float": 42.5}, "geometry": null}]}""" path = tmpdir.join("test_read_bool_subtype.geojson") with open(str(path), "w") as f: f.write(test_data) with fiona.open(str(path), "r") as src: feature = next(iter(src)) if fiona.gdal_version.major >= 2: assert type(feature["properties"]["bool"]) is bool else: assert type(feature["properties"]["bool"]) is int assert isinstance(feature["properties"]["not_bool"], six.integer_types) assert type(feature["properties"]["float"]) is float def test_write_bool_subtype(tmpdir): path = tmpdir.join("test_write_bool_subtype.geojson") schema = { "geometry": "Point", "properties": { "bool": "bool", "not_bool": "int", "float": "float", } } feature = { "geometry": None, "properties": { "bool": True, "not_bool": 1, "float": 42.5, } } with fiona.open(str(path), "w", driver="GeoJSON", schema=schema) as dst: dst.write(feature) with open(str(path), "r") as f: data = f.read() if fiona.gdal_version.major >= 2: assert """"bool": true""" in data else: assert """"bool": 1""" in data assert """"not_bool": 1""" in data Fiona-1.8.21/tests/test_transactions.py000066400000000000000000000050011420023252700201130ustar00rootroot00000000000000import os import fiona import fiona.ogrext import logging from random import uniform, randint from collections import defaultdict import pytest from tests.conftest import requires_gdal2 has_gpkg = "GPKG" in fiona.supported_drivers.keys() def create_records(count): for n in range(count): record = { "geometry": {"type": "Point", "coordinates": [uniform(-180, 180), uniform(-90, 90)]}, "properties": {"value": randint(0, 1000)} } yield record class DebugHandler(logging.Handler): def __init__(self, pattern): logging.Handler.__init__(self) self.pattern = pattern self.history = defaultdict(lambda: 0) def emit(self, record): if self.pattern in record.msg: self.history[record.msg] += 1 log = logging.getLogger() @requires_gdal2 @pytest.mark.skipif(not has_gpkg, reason="Requires geopackage driver") class TestTransaction: def setup_method(self): self.handler = DebugHandler(pattern="transaction") self.handler.setLevel(logging.DEBUG) log.setLevel(logging.DEBUG) log.addHandler(self.handler) def teardown_method(self): log.removeHandler(self.handler) def test_transaction(self, tmpdir): """ Test transaction start/commit is called the expected number of times, and that the default transaction size can be overloaded. The test uses a custom logging handler to listen for the debug messages produced when the transaction is started/comitted. 
""" num_records = 250 transaction_size = 100 assert fiona.ogrext.DEFAULT_TRANSACTION_SIZE == 20000 fiona.ogrext.DEFAULT_TRANSACTION_SIZE = transaction_size assert fiona.ogrext.DEFAULT_TRANSACTION_SIZE == transaction_size path = str(tmpdir.join("output.gpkg")) schema = { "geometry": "Point", "properties": {"value": "int"} } with fiona.open(path, "w", driver="GPKG", schema=schema) as dst: dst.writerecords(create_records(num_records)) assert self.handler.history["Starting transaction (initial)"] == 1 assert self.handler.history["Starting transaction (intermediate)"] == num_records // transaction_size assert self.handler.history["Comitting transaction (intermediate)"] == num_records // transaction_size assert self.handler.history["Comitting transaction (final)"] == 1 with fiona.open(path, "r") as src: assert len(src) == num_records Fiona-1.8.21/tests/test_transform.py000066400000000000000000000062371420023252700174320ustar00rootroot00000000000000"""Tests of the transform submodule""" import math import pytest from fiona import transform @pytest.mark.parametrize( "geom", [ {"type": "Point", "coordinates": [0.0, 0.0, 1000.0]}, { "type": "LineString", "coordinates": [[0.0, 0.0, 1000.0], [0.1, 0.1, -1000.0]], }, { "type": "MultiPoint", "coordinates": [[0.0, 0.0, 1000.0], [0.1, 0.1, -1000.0]], }, { "type": "Polygon", "coordinates": [ [ [0.0, 0.0, 1000.0], [0.1, 0.1, -1000.0], [0.1, -0.1, math.pi], [0.0, 0.0, 1000.0], ] ], }, { "type": "MultiPolygon", "coordinates": [ [ [ [0.0, 0.0, 1000.0], [0.1, 0.1, -1000.0], [0.1, -0.1, math.pi], [0.0, 0.0, 1000.0], ] ] ], }, ], ) def test_transform_geom_with_z(geom): """Transforming a geom with Z succeeds""" g2 = transform.transform_geom("epsg:4326", "epsg:3857", geom, precision=3) @pytest.mark.parametrize("crs", ["epsg:4326", "EPSG:4326", "WGS84", {'init': 'epsg:4326'}, {'proj': 'longlat', 'datum': 'WGS84', 'no_defs': True}, "OGC:CRS84"]) def test_axis_ordering(crs): """ Test if transform uses traditional_axis_mapping """ expected = (-8427998.647958742, 4587905.27136252) t1 = transform.transform(crs, "epsg:3857", [-75.71], [38.06]) assert (t1[0][0], t1[1][0]) == pytest.approx(expected) geom = {"type": "Point", "coordinates": [-75.71, 38.06]} g1 = transform.transform_geom(crs, "epsg:3857", geom, precision=3) assert g1["coordinates"] == pytest.approx(expected) rev_expected = (-75.71, 38.06) t2 = transform.transform("epsg:3857", crs, [-8427998.647958742], [4587905.27136252]) assert (t2[0][0], t2[1][0]) == pytest.approx(rev_expected) geom = {"type": "Point", "coordinates": [-8427998.647958742, 4587905.27136252]} g2 = transform.transform_geom("epsg:3857", crs, geom, precision=3) assert g2["coordinates"] == pytest.approx(rev_expected) def test_transform_issue971(): """ See https://github.com/Toblerity/Fiona/issues/971 """ source_crs = "epsg:25832" dest_src = "epsg:4326" geom = {'type': 'GeometryCollection', 'geometries': [{'type': 'LineString', 'coordinates': [(512381.8870945257, 5866313.311218272), (512371.23869999964, 5866322.282500001), (512364.6014999999, 5866328.260199999)]}]} geom_transformed = transform.transform_geom(source_crs, dest_src, geom, precision=3) assert geom_transformed['geometries'][0]['coordinates'][0] == pytest.approx((9.184, 52.946)) Fiona-1.8.21/tests/test_unicode.py000066400000000000000000000124721420023252700170430ustar00rootroot00000000000000# coding: utf-8 import logging import os import shutil import sys import tempfile from collections import OrderedDict import pytest import fiona from fiona.errors import SchemaError class 

Fiona-1.8.21/tests/test_transform.py


"""Tests of the transform submodule"""

import math

import pytest

from fiona import transform


@pytest.mark.parametrize(
    "geom",
    [
        {"type": "Point", "coordinates": [0.0, 0.0, 1000.0]},
        {
            "type": "LineString",
            "coordinates": [[0.0, 0.0, 1000.0], [0.1, 0.1, -1000.0]],
        },
        {
            "type": "MultiPoint",
            "coordinates": [[0.0, 0.0, 1000.0], [0.1, 0.1, -1000.0]],
        },
        {
            "type": "Polygon",
            "coordinates": [
                [
                    [0.0, 0.0, 1000.0],
                    [0.1, 0.1, -1000.0],
                    [0.1, -0.1, math.pi],
                    [0.0, 0.0, 1000.0],
                ]
            ],
        },
        {
            "type": "MultiPolygon",
            "coordinates": [
                [
                    [
                        [0.0, 0.0, 1000.0],
                        [0.1, 0.1, -1000.0],
                        [0.1, -0.1, math.pi],
                        [0.0, 0.0, 1000.0],
                    ]
                ]
            ],
        },
    ],
)
def test_transform_geom_with_z(geom):
    """Transforming a geom with Z succeeds"""
    g2 = transform.transform_geom("epsg:4326", "epsg:3857", geom, precision=3)


@pytest.mark.parametrize("crs", ["epsg:4326",
                                 "EPSG:4326",
                                 "WGS84",
                                 {'init': 'epsg:4326'},
                                 {'proj': 'longlat', 'datum': 'WGS84',
                                  'no_defs': True},
                                 "OGC:CRS84"])
def test_axis_ordering(crs):
    """Test if transform uses traditional_axis_mapping"""
    expected = (-8427998.647958742, 4587905.27136252)
    t1 = transform.transform(crs, "epsg:3857", [-75.71], [38.06])
    assert (t1[0][0], t1[1][0]) == pytest.approx(expected)

    geom = {"type": "Point", "coordinates": [-75.71, 38.06]}
    g1 = transform.transform_geom(crs, "epsg:3857", geom, precision=3)
    assert g1["coordinates"] == pytest.approx(expected)

    rev_expected = (-75.71, 38.06)
    t2 = transform.transform("epsg:3857", crs,
                             [-8427998.647958742], [4587905.27136252])
    assert (t2[0][0], t2[1][0]) == pytest.approx(rev_expected)

    geom = {"type": "Point",
            "coordinates": [-8427998.647958742, 4587905.27136252]}
    g2 = transform.transform_geom("epsg:3857", crs, geom, precision=3)
    assert g2["coordinates"] == pytest.approx(rev_expected)


def test_transform_issue971():
    """See https://github.com/Toblerity/Fiona/issues/971"""
    source_crs = "epsg:25832"
    dest_src = "epsg:4326"
    geom = {'type': 'GeometryCollection',
            'geometries': [
                {'type': 'LineString',
                 'coordinates': [(512381.8870945257, 5866313.311218272),
                                 (512371.23869999964, 5866322.282500001),
                                 (512364.6014999999, 5866328.260199999)]}]}
    geom_transformed = transform.transform_geom(source_crs, dest_src, geom,
                                                precision=3)
    assert geom_transformed['geometries'][0]['coordinates'][0] == pytest.approx((9.184, 52.946))
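
# ---------------------------------------------------------------------------
# Illustrative sketch (not a test): transform.transform works on parallel
# coordinate lists, so a bounding box goes through as its two corners
# (corner points only, with no edge densification). The helper name is ours.
def _transform_bounds_sketch(src_crs, dst_crs, bounds):
    left, bottom, right, top = bounds
    xs, ys = transform.transform(src_crs, dst_crs, [left, right], [bottom, top])
    return (xs[0], ys[0], xs[1], ys[1])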
""" field_name = u"区县名称" meta = { "schema": { "properties": OrderedDict([(field_name, "int")]), "geometry": "Point", }, "driver": "ESRI Shapefile", } feature = { "properties": {field_name: 123}, "geometry": {"type": "Point", "coordinates": [1, 2]} } # when encoding is specified, write is successful with fiona.open(os.path.join(self.tempdir, "test1.shp"), "w", encoding="GB2312", **meta) as collection: collection.write(feature) # no encoding with pytest.raises(SchemaError): fiona.open(os.path.join(self.tempdir, "test2.shp"), "w", **meta) Fiona-1.8.21/tests/test_version.py000066400000000000000000000046161420023252700171030ustar00rootroot00000000000000import fiona import platform import re import os import sys from tests.conftest import travis_only from fiona._env import GDALVersion, get_gdal_release_name def test_version_tuple(): version = fiona.gdal_version assert version.major >= 1 and isinstance(version.major, int) assert version.minor >= 0 and isinstance(version.minor, int) assert version.revision >= 0 and isinstance(version.revision, int) def test_version_comparison(): # version against version assert GDALVersion(4, 0, 0) > GDALVersion(3, 2, 1) assert GDALVersion(2, 0, 0) < GDALVersion(3, 2, 1) assert GDALVersion(3, 2, 2) > GDALVersion(3, 2, 1) assert GDALVersion(3, 2, 0) < GDALVersion(3, 2, 1) # tuple against version assert (4, 0, 0) > GDALVersion(3, 2, 1) assert (2, 0, 0) < GDALVersion(3, 2, 1) assert (3, 2, 2) > GDALVersion(3, 2, 1) assert (3, 2, 0) < GDALVersion(3, 2, 1) @travis_only def test_show_versions(capsys): version_pattern = re.compile(r"(\d+).(\d+).(\d+)") os_info = "{system} {release}".format(system=platform.system(), release=platform.release()) python_version = platform.python_version() python_exec = sys.executable msg = ("Fiona version: {fiona_version}" "\nGDAL version: {gdal_release_name}" "\nPROJ version: {proj_version}" "\n" "\nOS: {os_info}" "\nPython: {python_version}" "\nPython executable: '{python_exec}'" "\n" ) if fiona.gdal_version < GDALVersion(3, 0, 1): proj_version = "Proj version not available" else: proj_version = os.getenv("PROJVERSION") proj_version = re.match(version_pattern, proj_version).group(0) gdal_version = os.getenv("GDALVERSION") if not gdal_version == "master": gdal_version = re.match(version_pattern, gdal_version).group(0) else: gdal_version = get_gdal_release_name() msg_formatted = msg.format(fiona_version=fiona.__version__, gdal_release_name=gdal_version, proj_version=proj_version, os_info=os_info, python_version=python_version, python_exec=python_exec) fiona.show_versions() captured = capsys.readouterr() assert captured.out.strip() == msg_formatted.strip() Fiona-1.8.21/tests/test_vfs.py000066400000000000000000000146461420023252700162200ustar00rootroot00000000000000import logging import sys import os import pytest import boto3 import fiona from fiona.errors import FionaDeprecationWarning from fiona.vfs import vsi_path, parse_paths from .test_collection import TestReading from .test_collection_legacy import ReadingTest # Custom markers (from rasterio) mingdalversion = pytest.mark.skipif( fiona.gdal_version < (2, 1, 0), reason="S3 raster access requires GDAL 2.1") credentials = pytest.mark.skipif( not(boto3.Session()._session.get_credentials()), reason="S3 raster access requires credentials") # TODO: remove this once we've successfully moved the tar tests over # to TestVsiReading. class VsiReadingTest(ReadingTest): # There's a bug in GDAL 1.9.2 http://trac.osgeo.org/gdal/ticket/5093 # in which the VSI driver reports the wrong number of features. 

Fiona-1.8.21/tests/test_version.py


import fiona
import platform
import re
import os
import sys

from tests.conftest import travis_only
from fiona._env import GDALVersion, get_gdal_release_name


def test_version_tuple():
    version = fiona.gdal_version
    assert version.major >= 1 and isinstance(version.major, int)
    assert version.minor >= 0 and isinstance(version.minor, int)
    assert version.revision >= 0 and isinstance(version.revision, int)


def test_version_comparison():
    # version against version
    assert GDALVersion(4, 0, 0) > GDALVersion(3, 2, 1)
    assert GDALVersion(2, 0, 0) < GDALVersion(3, 2, 1)
    assert GDALVersion(3, 2, 2) > GDALVersion(3, 2, 1)
    assert GDALVersion(3, 2, 0) < GDALVersion(3, 2, 1)

    # tuple against version
    assert (4, 0, 0) > GDALVersion(3, 2, 1)
    assert (2, 0, 0) < GDALVersion(3, 2, 1)
    assert (3, 2, 2) > GDALVersion(3, 2, 1)
    assert (3, 2, 0) < GDALVersion(3, 2, 1)


@travis_only
def test_show_versions(capsys):
    version_pattern = re.compile(r"(\d+).(\d+).(\d+)")
    os_info = "{system} {release}".format(system=platform.system(),
                                          release=platform.release())
    python_version = platform.python_version()
    python_exec = sys.executable

    msg = ("Fiona version: {fiona_version}"
           "\nGDAL version: {gdal_release_name}"
           "\nPROJ version: {proj_version}"
           "\n"
           "\nOS: {os_info}"
           "\nPython: {python_version}"
           "\nPython executable: '{python_exec}'"
           "\n")

    if fiona.gdal_version < GDALVersion(3, 0, 1):
        proj_version = "Proj version not available"
    else:
        proj_version = os.getenv("PROJVERSION")
        proj_version = re.match(version_pattern, proj_version).group(0)

    gdal_version = os.getenv("GDALVERSION")
    if not gdal_version == "master":
        gdal_version = re.match(version_pattern, gdal_version).group(0)
    else:
        gdal_version = get_gdal_release_name()

    msg_formatted = msg.format(fiona_version=fiona.__version__,
                               gdal_release_name=gdal_version,
                               proj_version=proj_version,
                               os_info=os_info,
                               python_version=python_version,
                               python_exec=python_exec)

    fiona.show_versions()
    captured = capsys.readouterr()
    assert captured.out.strip() == msg_formatted.strip()


Fiona-1.8.21/tests/test_vfs.py


import logging
import sys
import os

import pytest
import boto3

import fiona
from fiona.errors import FionaDeprecationWarning
from fiona.vfs import vsi_path, parse_paths

from .test_collection import TestReading
from .test_collection_legacy import ReadingTest


# Custom markers (from rasterio)
mingdalversion = pytest.mark.skipif(
    fiona.gdal_version < (2, 1, 0),
    reason="S3 raster access requires GDAL 2.1")

credentials = pytest.mark.skipif(
    not(boto3.Session()._session.get_credentials()),
    reason="S3 raster access requires credentials")


# TODO: remove this once we've successfully moved the tar tests over
# to TestVsiReading.
class VsiReadingTest(ReadingTest):
    # There's a bug in GDAL 1.9.2 http://trac.osgeo.org/gdal/ticket/5093
    # in which the VSI driver reports the wrong number of features.
    # I'm overriding ReadingTest's test_filter_1 with a function that
    # passes and creating a new method in this class that we can exclude
    # from the test runner at run time.

    @pytest.mark.xfail(reason="The number of features present in the archive "
                              "differs based on the GDAL version.")
    def test_filter_vsi(self):
        results = list(self.c.filter(bbox=(-114.0, 35.0, -104, 45.0)))
        assert len(results) == 67
        f = results[0]
        assert f['id'] == "0"
        assert f['properties']['STATE'] == 'UT'


class TestVsiReading(TestReading):
    # There's a bug in GDAL 1.9.2 http://trac.osgeo.org/gdal/ticket/5093
    # in which the VSI driver reports the wrong number of features.
    # I'm overriding TestReading's test_filter_1 with a function that
    # passes and creating a new method in this class that we can exclude
    # from the test runner at run time.

    @pytest.mark.xfail(reason="The number of features present in the archive "
                              "differs based on the GDAL version.")
    def test_filter_vsi(self):
        results = list(self.c.filter(bbox=(-114.0, 35.0, -104, 45.0)))
        assert len(results) == 67
        f = results[0]
        assert f['id'] == "0"
        assert f['properties']['STATE'] == 'UT'


class TestZipReading(TestVsiReading):
    @pytest.fixture(autouse=True)
    def zipfile(self, data_dir, path_coutwildrnp_zip):
        self.c = fiona.open("zip://{}".format(path_coutwildrnp_zip), "r")
        self.path = os.path.join(data_dir, 'coutwildrnp.zip')
        yield
        self.c.close()

    def test_open_repr(self):
        assert (
            repr(self.c) ==
            ("<open Collection '/vsizip/{path}:coutwildrnp', mode 'r' "
             "at {id}>".format(
                 id=hex(id(self.c)),
                 path=self.path)))

    def test_closed_repr(self):
        self.c.close()
        assert (
            repr(self.c) ==
            ("<closed Collection '/vsizip/{path}:coutwildrnp', mode 'r' "
             "at {id}>".format(
                 id=hex(id(self.c)),
                 path=self.path)))

    def test_path(self):
        assert self.c.path == '/vsizip/{path}'.format(path=self.path)


class TestZipArchiveReading(TestVsiReading):
    @pytest.fixture(autouse=True)
    def zipfile(self, data_dir, path_coutwildrnp_zip):
        vfs = 'zip://{}'.format(path_coutwildrnp_zip)
        self.c = fiona.open(vfs + "!coutwildrnp.shp", "r")
        self.path = os.path.join(data_dir, 'coutwildrnp.zip')
        yield
        self.c.close()

    def test_open_repr(self):
        assert (
            repr(self.c) ==
            ("<open Collection '/vsizip/{path}/coutwildrnp.shp:coutwildrnp', "
             "mode 'r' at {id}>".format(
                 id=hex(id(self.c)),
                 path=self.path)))

    def test_closed_repr(self):
        self.c.close()
        assert (
            repr(self.c) ==
            ("<closed Collection '/vsizip/{path}/coutwildrnp.shp:coutwildrnp', "
             "mode 'r' at {id}>".format(
                 id=hex(id(self.c)),
                 path=self.path)))

    def test_path(self):
        assert (self.c.path ==
                '/vsizip/{path}/coutwildrnp.shp'.format(path=self.path))


class TestZipArchiveReadingAbsPath(TestZipArchiveReading):
    @pytest.fixture(autouse=True)
    def zipfile(self, path_coutwildrnp_zip):
        vfs = 'zip://{}'.format(os.path.abspath(path_coutwildrnp_zip))
        self.c = fiona.open(vfs + "!coutwildrnp.shp", "r")
        yield
        self.c.close()

    def test_open_repr(self):
        assert repr(self.c).startswith("<open Collection '/vsizip/")

    def test_closed_repr(self):
        self.c.close()
        assert repr(self.c).startswith("<closed Collection '/vsizip/")

    def test_path(self):
        assert self.c.path.startswith('/vsizip/')


class TestTarReading(TestVsiReading):
    @pytest.fixture(autouse=True)
    def tarfile(self, data_dir, path_coutwildrnp_tar):
        self.c = fiona.open("tar://{}!testing/coutwildrnp.shp".format(
            path_coutwildrnp_tar), "r")
        self.path = os.path.join(data_dir, 'coutwildrnp.tar')
        yield
        self.c.close()

    def test_open_repr(self):
        assert repr(self.c).startswith("<open Collection '/vsitar/")

    def test_closed_repr(self):
        self.c.close()
        assert (
            repr(self.c) ==
            ("<closed Collection '/vsitar/{path}/testing/coutwildrnp.shp:"
             "coutwildrnp', mode 'r' at {id}>".format(
                 id=hex(id(self.c)),
                 path=self.path)))

    def test_path(self):
        assert (
            self.c.path ==
            '/vsitar/{path}/testing/coutwildrnp.shp'.format(path=self.path))


@pytest.mark.network
def test_open_http():
    ds = fiona.open('https://raw.githubusercontent.com/OSGeo/gdal/master/autotest/ogr/data/poly.shp')
    assert len(ds) == 10


@credentials
@mingdalversion
@pytest.mark.network
def test_open_s3():
    ds = fiona.open('zip+s3://fiona-testing/coutwildrnp.zip')
    assert len(ds) == 67


@pytest.mark.network
def test_open_zip_https():
    ds = fiona.open('zip+https://s3.amazonaws.com/fiona-testing/coutwildrnp.zip')
    assert len(ds) == 67
parse_paths("/", "zip://foo.zip") == ("/", "zip", "foo.zip") Fiona-1.8.21/tests/test_write.py000066400000000000000000000013401420023252700165370ustar00rootroot00000000000000"""New tests of writing feature collections.""" import fiona from fiona.crs import from_epsg def test_issue771(tmpdir, caplog): """Overwrite a GeoJSON file without logging errors.""" schema = {"geometry": "Point", "properties": {"zero": "int"}} feature = { "geometry": {"type": "Point", "coordinates": (0, 0)}, "properties": {"zero": "0"}, } outputfile = tmpdir.join("test.geojson") for i in range(2): with fiona.open( str(outputfile), "w", driver="GeoJSON", schema=schema, crs=from_epsg(4326) ) as collection: collection.write(feature) assert outputfile.exists() for record in caplog.records: assert record.levelname != "ERROR"