pax_global_header00006660000000000000000000000064131744605230014517gustar00rootroot0000000000000052 comment=1669b439c1187a2be79176df8025fa76f13c3e55 Fiona-1.7.10/000077500000000000000000000000001317446052300126415ustar00rootroot00000000000000Fiona-1.7.10/.gitignore000066400000000000000000000017121317446052300146320ustar00rootroot00000000000000# Byte-compiled / optimized / DLL files __pycache__/ *.py[cod] # C extensions *.so # Distribution / packaging .Python env/ build/ develop-eggs/ dist/ downloads/ eggs/ .eggs/ lib/ lib64/ parts/ sdist/ var/ *.egg-info/ .installed.cfg *.egg # PyInstaller # Usually these files are written by a python script from a template # before PyInstaller builds the exe, so as to inject date/other infos into it. *.manifest *.spec # Installer logs pip-log.txt pip-delete-this-directory.txt # Unit test / coverage reports htmlcov/ .tox/ .coverage .coverage.* .cache nosetests.xml coverage.xml *,cover # Translations *.mo *.pot # Django stuff: *.log # Sphinx documentation docs/_build/ # PyBuilder target/ # IDE's etc. .idea/ venv/ venv2/ # fiona VERSION.txt fiona/ogrext.c fiona/_crs.c fiona/_drivers.c fiona/_err.c fiona/_geometry.c fiona/_transform.cpp fiona/ograpi.pxd fiona/ogrext.pyx tests/data/coutwildrnp.json tests/data/coutwildrnp.tar tests/data/coutwildrnp.zip Fiona-1.7.10/.travis.yml000066400000000000000000000034421317446052300147550ustar00rootroot00000000000000language: python sudo: false cache: directories: - $GDALINST - ~/.cache/pip env: global: - PIP_WHEEL_DIR=$HOME/.cache/pip/wheels - PIP_FIND_LINKS=file://$HOME/.cache/pip/wheels - GDALINST=$HOME/gdalinstall - GDALBUILD=$HOME/gdalbuild matrix: - GDALVERSION="1.9.2" - GDALVERSION="1.11.5" - GDALVERSION="2.0.3" - GDALVERSION="2.1.1" addons: apt: packages: - gdal-bin - libproj-dev - libhdf5-serial-dev - libgdal-dev - libatlas-dev - libatlas-base-dev - gfortran python: - "2.7" - "3.5" before_install: - pip install -U pip - pip install setuptools==36.0.1 - pip install wheel - . 
./scripts/travis_gdal_install.sh - export PATH=$GDALINST/gdal-$GDALVERSION/bin:$PATH - export LD_LIBRARY_PATH=$GDALINST/gdal-$GDALVERSION/lib:$LD_LIBRARY_PATH - export GDAL_DATA=$GDALINST/gdal-$GDALVERSION/share/gdal - export PROJ_LIB=/usr/share/proj - gdal-config --version install: - "if [ $(gdal-config --version) == \"$GDALVERSION\" ]; then echo \"Using gdal $GDALVERSION\"; else echo \"NOT using gdal $GDALVERSION as expected; aborting\"; exit 1; fi" - "pip wheel -r requirements-dev.txt" - "pip install -r requirements-dev.txt" - "python setup.py sdist" - "pip install --upgrade --force-reinstall --global-option=build_ext --global-option='-I$GDALINST/gdal-$GDALVERSION/include' --global-option='-L$GDALINST/gdal-$GDALVERSION/lib' --global-option='-R$GDALINST/gdal-$GDALVERSION/lib' dist/Fiona*" - "fio --version" - "cp -r tests /tmp" script: - "cd /tmp && coverage run --source=fiona --omit='*.pxd,*.pyx,*/tests/*,*/docs/*,*/examples/*,*/benchmarks/*' -m nose --exclude test_filter_vsi --exclude test_geopackage --exclude test_write_mismatch tests" after_success: - coveralls || echo "!! intermittent coveralls failure" Fiona-1.7.10/CHANGES.txt000066400000000000000000000414411317446052300144560ustar00rootroot00000000000000Changes ======= All issue numbers are relative to https://github.com/Toblerity/Fiona/issues. 1.7.10 (2017-10-26) ------------------- Bug fixes: - An extraneous printed line from the ``rio cat --layers`` validator has been removed (#478). Packaging: - Official OS X and Manylinux1 wheels (on PyPI) for this release will be compatible with Shapely 1.6.2 and Rasterio 1.0a10 wheels. 1.7.9.post1 (2017-08-21) ------------------------ This release introduces no changes in the Fiona package. It upgrades GDAL from 2.2.0 to 2.2.1 in wheels that we publish to the Python Package Index. 1.7.9 (2017-08-17) ------------------ Bug fixes: - Acquire the GIL for GDAL error callback functions to prevent crashes when GDAL errors occur when the GIL has been released by user code. 
- Sync and flush layers when closing even when the number of features is not precisely known (#467). 1.7.8 (2017-06-20) ------------------ Bug fixes: - Provide all arguments needed by CPLError based exceptions (#456). 1.7.7 (2017-06-05) ------------------ Bug fixes: - Switch logger `warn()` (deprecated) calls to `warning()`. - Replace all relative imports and cimports in Cython modules with absolute imports (#450). - Avoid setting `PROJ_LIB` to a non-existent directory (#439). 1.7.6 (2017-04-26) ------------------ Bug fixes: - Fall back to `share/proj` for PROJ_LIB (#440). - Replace every call to `OSRDestroySpatialReference()` with `OSRRelease()`, fixing the GPKG driver crasher reported in #441 (#443). - Add a `DriverIOError` derived from `IOError` to use for driver-specific errors such as the GeoJSON driver's refusal to overwrite existing files. Also we now ensure that when this error is raised by `fiona.open()` any created read or write session is deleted, this eliminates spurious exceptions on teardown of broken `Collection` objects (#437, #444). 1.7.5 (2017-03-20) ------------------ Bug fixes: - Opening a data file in read (the default) mode with `fiona.open()` using the the `driver` or `drivers` keyword arguments (to specify certain format drivers) would sometimes cause a crash on Windows due to improperly terminated lists of strings (#428). The fix: Fiona's buggy `string_list()` has been replaced by GDAL's `CSLAddString()`. 1.7.4 (2017-02-20) ------------------ Bug fixes: - OGR's EsriJSON detection fails when certain keys aren't found in the first 6000 bytes of data passed to `BytesCollection` (#422). A .json file extension is now explicitly given to the in-memory file behind `BytesCollection` when the `driver='GeoJSON'` keyword argument is given (#423). 1.7.3 (2017-02-14) ------------------ Roses are red. Tan is a pug. Software regression's the most embarrassing bug. Bug fixes: - Use __stdcall for GDAL error handling callback on Windows as in Rasterio. 
- Turn on latent support for zip:// URLs in rio-cat and rio-info (#421). - The 1.7.2 release broke support for zip files with absolute paths (#418). This regression has been fixed with tests to confirm. 1.7.2 (2017-01-27) ------------------ Future Deprecation: - `Collection.__next__()` is buggy in that it can lead to duplication of features when used in combination with `Collection.filter()` or `Collection.__iter__()`. It will be removed in Fiona 2.0. Please check for usage of this deprecated feature by running your tests or programs with `PYTHONWARNINGS="always:::fiona"` or `-W"always:::fiona"` and switch from `next(collection)` to `next(iter(collection))` (#301). Bug fix: - Zipped streams of bytes can be accessed by `BytesCollection` (#318). 1.7.1.post1 (2016-12-23) ------------------------ - New binary wheels using version 1.2.0 of sgillies/frs-wheel-builds. See https://github.com/sgillies/frs-wheel-builds/blob/master/CHANGES.txt. 1.7.1 (2016-11-16) ------------------ Bug Fixes: - Prevent Fiona from stumbling over 'Z', 'M', and 'ZM' geometry types introduced in GDAL 2.1 (#384). Fiona 1.7.1 doesn't add explicit support for these types, they are coerced to geometry types 1-7 ('Point', 'LineString', etc.) - Raise an `UnsupportedGeometryTypeError` when a bogus or unsupported geometry type is encountered in a new collection's schema or elsewhere (#340). - Enable `--precision 0` for fio-cat (#370). - Prevent datetime exceptions from unnecessarily stopping collection iteration by yielding `None` (#385) - Replace log.warn calls with log.warning calls (#379). - Print an error message if neither gdal-config or `--gdalversion` indicate a GDAL C API version when running `setup.py` (#364). - Let dict-like subclasses through CRS type checks (#367). 1.7.0post2 (2016-06-15) ----------------------- Packaging: define extension modules for 'clean' and 'config' targets (#363). 
1.7.0post1 (2016-06-15) ----------------------- Packaging: No files are copied for the 'clean' setup target (#361, #362). 1.7.0 (2016-06-14) ------------------ The C extension modules in this library can now be built and used with either a 1.x or 2.x release of the GDAL library. Big thanks to René Buffat for leading this effort. Refactoring: - The `ogrext1.pyx` and `ogrext2.pyx` files now use separate C APIs defined in `ogrext1.pxd` and `ogrex2.pxd`. The other extension modules have been refactored so that they do not depend on either of these modules and use subsets of the GDAL/OGR API compatible with both GDAL 1.x and 2.x (#359). Packaging: - Source distributions now contain two different sources for the `ogrext` extension module. The `ogrext1.c` file will be used with GDAL 1.x and the `ogrext2.c` file will be used with GDAL 2.x. 1.7b2 (2016-06-13) ------------------ - New feature: enhancement of the `--layer` option for fio-cat and fio-dump to allow separate layers of one or more multi-layer input files to be selected (#349). 1.7b1 (2016-06-10) ------------------ - New feature: support for GDAL version 2+ (#259). - New feature: a new fio-calc CLI command (#273). - New feature: `--layer` options for fio-info (#316) and fio-load (#299). - New feature: a `--no-parse` option for fio-collect that lets a careful user avoid extra JSON serialization and deserialization (#306). - Bug fix: `+wktext` is now preserved when serializing CRS from WKT to PROJ.4 dicts (#352). - Bug fix: a small memory leak when opening a collection has been fixed (#337). - Bug fix: internal unicode errors now result in a log message and a `UnicodeError` exception, not a `TypeError` (#356). 1.6.4 (2016-05-06) ------------------ - Raise ImportError if the active GDAL library version is >= 2.0 instead of failing unpredictably (#338, #341). Support for GDAL>=2.0 is coming in Fiona 1.7. 
1.6.3.post1 (2016-03-27) ------------------------ - No changes to the library in this post-release version, but there is a significant change to the distributions on PyPI: to help make Fiona more compatible with Shapely on OS X, the GDAL shared library included in the macosx (only) binary wheels now statically links the GEOS library. See https://github.com/sgillies/frs-wheel-builds/issues/5. 1.6.3 (2015-12-22) ------------------ - Daytime has been decreasing in the Northern Hemisphere, but is now increasing again as it should. - Non-UTF strings were being passed into OGR functions in some situations and on Windows this would sometimes crash a Python process (#303). Fiona now raises errors derived from UnicodeError when field names or field values can't be encoded. 1.6.2 (2015-09-22) ------------------ - Providing only PROJ4 representations in the dataset meta property resulted in loss of CRS information when using the `fiona.open(..., **src.meta) as dst` pattern (#265). This bug has been addressed by adding a crs_wkt item to the` meta property and extending the `fiona.open()` and the collection constructor to look for and prioritize this keyword argument. 1.6.1 (2015-08-12) ------------------ - Bug fix: Fiona now deserializes JSON-encoded string properties provided by the OGR GeoJSON driver (#244, #245, #246). - Bug fix: proj4 data was not copied properly into binary distributions due to a typo (#254). Special thanks to WFMU DJ Liz Berg for the awesome playlist that's fueling my release sprint. Check it out at http://wfmu.org/playlists/shows/62083. You can't unhear Love Coffin. 1.6.0 (2015-07-21) ------------------ - Upgrade Cython requirement to 0.22 (#214). - New BytesCollection class (#215). - Add GDAL's OpenFileGDB driver to registered drivers (#221). - Implement CLI commands as plugins (#228). - Raise click.abort instead of calling sys.exit, preventing suprising exits (#236). 
1.5.1 (2015-03-19) ------------------ - Restore test data to sdists by fixing MANIFEST.in (#216). 1.5.0 (2015-02-02) ------------------ - Finalize GeoJSON feature sequence options (#174). - Fix for reading of datasets that don't support feature counting (#190). - New test dataset (#188). - Fix for encoding error (#191). - Remove confusing warning (#195). - Add data files for binary wheels (#196). - Add control over drivers enabled when reading datasets (#203). - Use cligj for CLI options involving GeoJSON (#204). - Fix fio-info --bounds help (#206). 1.4.8 (2014-11-02) ------------------ - Add missing crs_wkt property as in Rasterio (#182). 1.4.7 (2014-10-28) ------------------ - Fix setting of CRS from EPSG codes (#149). 1.4.6 (2014-10-21) ------------------ - Handle 3D coordinates in bounds() #178. 1.4.5 (2014-10-18) ------------------ - Add --bbox option to fio-cat (#163). - Skip geopackage tests if run from an sdist (#167). - Add fio-bounds and fio-distrib. - Restore fio-dump to working order. 1.4.4 (2014-10-13) ------------------ - Fix accidental requirement on GDAL 1.11 introduced in 1.4.3 (#164). 1.4.3 (2014-10-10) ------------------ - Add support for geopackage format (#160). - Add -f and --format aliases for --driver in CLI (#162). - Add --version option and env command to CLI. 1.4.2 (2014-10-03) ------------------ - --dst-crs and --src-crs options for fio cat and collect (#159). 1.4.1 (2014-09-30) ------------------ - Fix encoding bug in collection's __getitem__ (#153). 1.4.0 (2014-09-22) ------------------ - Add fio cat and fio collect commands (#150). - Return of Python 2.6 compatibility (#148). - Improved CRS support (#149). 1.3.0 (2014-09-17) ------------------ - Add single metadata item accessors to fio inf (#142). - Move fio to setuptools entry point (#142). - Add fio dump and load commands (#143). - Remove fio translate command. 1.2.0 (2014-09-02) ------------------ - Always show property width and precision in schema (#123). 
- Write datetime properties of features (#125). - Reset spatial filtering in filter() (#129). - Accept datetime.date objects as feature properties (#130). - Add slicing to collection iterators (#132). - Add geometry object masks to collection iterators (#136). - Change source layout to match Shapely and Rasterio (#138). 1.1.6 (2014-07-23) ------------------ - Implement Collection __getitem__() (#112). - Leave GDAL finalization to the DLL's destructor (#113). - Add Collection keys(), values(), items(), __contains__() (#114). - CRS bug fix (#116). - Add fio CLI program. 1.1.5 (2014-05-21) ------------------ - Addition of cpl_errs context manager (#108). - Check for NULLs with '==' test instead of 'is' (#109). - Open auxiliary files with encoding='utf-8' in setup for Python 3 (#110). 1.1.4 (2014-04-03) ------------------ - Convert 'long' in schemas to 'int' (#101). - Carefully map Python schema to the possibly munged internal schema (#105). - Allow writing of features with geometry: None (#71). 1.1.3 (2014-03-23) ------------------ - Always register all GDAL and OGR drivers when entering the DriverManager context (#80, #92). - Skip unsupported field types with a warning (#91). - Allow OGR config options to be passed to fiona.drivers() (#90, #93). - Add a bounds() function (#100). - Turn on GPX driver. 1.1.2 (2014-02-14) ------------------ - Remove collection slice left in dumpgj (#88). 1.1.1 (2014-02-02) ------------------ - Add an interactive file inspector like the one in rasterio. - CRS to_string bug fix (#83). 1.1 (2014-01-22) ---------------- - Use a context manager to manage drivers (#78), a backwards compatible but big change. Fiona is now compatible with rasterio and plays better with the osgeo package. 1.0.3 (2014-01-21) ------------------ - Fix serialization of +init projections (#69). 1.0.2 (2013-09-09) ------------------ - Smarter, better test setup (#65, #66, #67). - Add type='Feature' to records read from a Collection (#68). 
- Skip geometry validation when using GeoJSON driver (#61). - Dumpgj file description reports record properties as a list (as in dict.items()) instead of a dict. 1.0.1 (2013-08-16) ------------------ - Allow ordering of written fields and preservation of field order when reading (#57). 1.0 (2013-07-30) ----------------- - Add prop_type() function. - Allow UTF-8 encoded paths for Python 2 (#51). For Python 3, paths must always be str, never bytes. - Remove encoding from collection.meta, it's a file creation option only. - Support for linking GDAL frameworks (#54). 0.16.1 (2013-07-02) ------------------- - Add listlayers, open, prop_width to __init__py:__all__. - Reset reading of OGR layer whenever we ask for a collection iterator (#49). 0.16 (2013-06-24) ----------------- - Add support for writing layers to multi-layer files. - Add tests to reach 100% Python code coverage. 0.15 (2013-06-06) ----------------- - Get and set numeric field widths (#42). - Add support for multi-layer data sources (#17). - Add support for zip and tar virtual filesystems (#45). - Add listlayers() function. - Add GeoJSON to list of supported formats (#47). - Allow selection of layers by index or name. 0.14 (2013-05-04) ----------------- - Add option to add JSON-LD in the dumpgj program. - Compare values to six.string_types in Collection constructor. - Add encoding to Collection.meta. - Document dumpgj in README. 0.13 (2013-04-30) ----------------- - Python 2/3 compatibility in a single package. Pythons 2.6, 2.7, 3.3 now supported. 0.12.1 (2013-04-16) ------------------- - Fix messed up linking of README in sdist (#39). 0.12 (2013-04-15) ----------------- - Fix broken installation of extension modules (#35). - Log CPL errors at their matching Python log levels. - Use upper case for encoding names within OGR, lower case in Python. 0.11 (2013-04-14) ----------------- - Cythonize .pyx files (#34). - Work with or around OGR's internal recoding of record data (#35). 
- Fix bug in serialization of int/float PROJ.4 params. 0.10 (2013-03-23) ----------------- - Add function to get the width of str type properties. - Handle validation and schema representation of 3D geometry types (#29). - Return {'geometry': None} in the case of a NULL geometry (#31). 0.9.1 (2013-03-07) ------------------ - Silence the logger in ogrext.so (can be overridden). - Allow user specification of record field encoding (like 'Windows-1252' for Natural Earth shapefiles) to help when OGR can't detect it. 0.9 (2013-03-06) ---------------- - Accessing file metadata (crs, schema, bounds) on never inspected closed files returns None without exceptions. - Add a dict of supported_drivers and their supported modes. - Raise ValueError for unsupported drivers and modes. - Remove asserts from ogrext.pyx. - Add validate_record method to collections. - Add helpful coordinate system functions to fiona.crs. - Promote use of fiona.open over fiona.collection. - Handle Shapefile's mix of LineString/Polygon and multis (#18). - Allow users to specify width of shapefile text fields (#20). 0.8 (2012-02-21) ---------------- - Replaced .opened attribute with .closed (product of collection() is always opened). Also a __del__() which will close a Collection, but still not to be depended upon. - Added writerecords method. - Added a record buffer and better counting of records in a collection. - Manage one iterator per collection/session. - Added a read-only bounds property. 0.7 (2012-01-29) ---------------- - Initial timezone-naive support for date, time, and datetime fields. Don't use these field types if you can avoid them. RFC 3339 datetimes in a string field are much better. 0.6.2 (2012-01-10) ------------------ - Diagnose and set the driver property of collection in read mode. - Fail if collection paths are not to files. Multi-collection workspaces are a (maybe) TODO. 0.6.1 (2012-01-06) ------------------ - Handle the case of undefined crs for disk collections. 
0.6 (2012-01-05) ---------------- - Support for collection coordinate reference systems based on Proj4. - Redirect OGR warnings and errors to the Fiona log. - Assert that pointers returned from the ograpi functions are not NULL before using. 0.5 (2011-12-19) ---------------- - Support for reading and writing collections of any geometry type. - Feature and Geometry classes replaced by mappings (dicts). - Removal of Workspace class. 0.2 (2011-09-16) ---------------- - Rename WorldMill to Fiona. 0.1.1 (2008-12-04) ------------------ - Support for features with no geometry. Fiona-1.7.10/CODE_OF_CONDUCT.md000066400000000000000000000036751317446052300154530ustar00rootroot00000000000000# Contributor Code of Conduct As contributors and maintainers of this project, and in the interest of fostering an open and welcoming community, we pledge to respect all people who contribute through reporting issues, posting feature requests, updating documentation, submitting pull requests or patches, and other activities. We are committed to making participation in this project a harassment-free experience for everyone, regardless of level of experience, gender, gender identity and expression, sexual orientation, disability, personal appearance, body size, race, ethnicity, age, religion, or nationality. Examples of unacceptable behavior by participants include: * The use of sexualized language or imagery * Personal attacks * Trolling or insulting/derogatory comments * Public or private harassment * Publishing other's private information, such as physical or electronic addresses, without explicit permission * Other unethical or unprofessional conduct. Project maintainers have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct. 
By adopting this Code of Conduct, project maintainers commit themselves to fairly and consistently applying these principles to every aspect of managing this project. Project maintainers who do not follow or enforce the Code of Conduct may be permanently removed from the project team. This code of conduct applies both within project spaces and in public spaces when an individual is representing the project or its community. Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by opening an issue or contacting one or more of the project maintainers. This Code of Conduct is adapted from the [Contributor Covenant](http://contributor-covenant.org), version 1.2.0, available at [http://contributor-covenant.org/version/1/2/0/](http://contributor-covenant.org/version/1/2/0/) Fiona-1.7.10/CREDITS.txt000066400000000000000000000027271317446052300145070ustar00rootroot00000000000000Credits ======= Fiona is written by: - Sean Gillies - Rene Buffat - Kevin Wurster - Micah Cochran - Matthew Perry - Joshua Arnott - Kelsey Jordahl - Patrick Young - Simon Norris - Hannes Gräuler - Johan Van de Wauw - Jacob Wasserman - Ryan Grout - Michael Weisman - fredj - Bas Couwenberg - Brendan Ward - Michele Citterio - Miro Hrončok - qinfeng - Michael Weisman - Brandon Liu - Ludovic Delauné - Martijn Visser - Ariel Nunez - Oliver Tonnhofer - Stefano Costa - dimlev - wilsaj - Jesse Crocker Fiona would not be possible without the great work of Frank Warmerdam and other GDAL/OGR developers. Some portions of this work were supported by a grant (for Pleiades_) from the U.S. National Endowment for the Humanities (http://www.neh.gov). .. _Pleiades: http://pleiades.stoa.org Fiona-1.7.10/LICENSE.txt000066400000000000000000000027571317446052300144770ustar00rootroot00000000000000 Copyright (c) 2007, Sean C. Gillies All rights reserved. 
Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither the name of Sean C. Gillies nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
Fiona-1.7.10/MANIFEST.in000066400000000000000000000006311317446052300143770ustar00rootroot00000000000000global-exclude .DS_Store global-exclude *.pyc recursive-exclude docs/data * recursive-exclude docs/_build * exclude MANIFEST.in exclude *.txt *.py recursive-include docs *.rst *.txt recursive-include tests *.py recursive-include tests/data * include fiona/*.c fiona/*.cpp exclude fiona/ogrext.c include CHANGES.txt CREDITS.txt LICENSE.txt VERSION.txt README.rst include benchmark.py setup.py requirements.txt Fiona-1.7.10/README.rst000066400000000000000000000272631317446052300143420ustar00rootroot00000000000000===== Fiona ===== Fiona is OGR's neat, nimble, no-nonsense API for Python programmers. .. image:: https://travis-ci.org/Toblerity/Fiona.png?branch=master :target: https://travis-ci.org/Toblerity/Fiona .. image:: https://coveralls.io/repos/Toblerity/Fiona/badge.png :target: https://coveralls.io/r/Toblerity/Fiona Fiona is designed to be simple and dependable. It focuses on reading and writing data in standard Python IO style and relies upon familiar Python types and protocols such as files, dictionaries, mappings, and iterators instead of classes specific to OGR. Fiona can read and write real-world data using multi-layered GIS formats and zipped virtual file systems and integrates readily with other Python GIS packages such as pyproj_, Rtree_, and Shapely_. For more details, see: * Fiona `home page `__ * Fiona `docs and manual `__ * Fiona `examples `__ Usage ===== Collections ----------- Records are read from and written to ``file``-like `Collection` objects returned from the ``fiona.open()`` function. Records are mappings modeled on the GeoJSON format. They don't have any spatial methods of their own, so if you want to do anything fancy with them you will probably need Shapely or something like it. Here is an example of using Fiona to read some records from one data file, change their geometry attributes, and write them to a new data file. .. 
code-block:: python import fiona # Register format drivers with a context manager with fiona.drivers(): # Open a file for reading. We'll call this the "source." with fiona.open('tests/data/coutwildrnp.shp') as source: # The file we'll write to, the "sink", must be initialized # with a coordinate system, a format driver name, and # a record schema. We can get initial values from the open # collection's ``meta`` property and then modify them as # desired. meta = source.meta meta['schema']['geometry'] = 'Point' # Open an output file, using the same format driver and # coordinate reference system as the source. The ``meta`` # mapping fills in the keyword parameters of fiona.open(). with fiona.open('test_write.shp', 'w', **meta) as sink: # Process only the records intersecting a box. for f in source.filter(bbox=(-107.0, 37.0, -105.0, 39.0)): # Get a point on the boundary of the record's # geometry. f['geometry'] = { 'type': 'Point', 'coordinates': f['geometry']['coordinates'][0][0]} # Write the record out. sink.write(f) # The sink's contents are flushed to disk and the file is # closed when its ``with`` block ends. This effectively # executes ``sink.flush(); sink.close()``. # At the end of the ``with fiona.drivers()`` block, context # manager exits and all drivers are de-registered. The fiona.drivers() function and context manager are new in 1.1. The example above shows the way to use it to register and de-register drivers in a deterministic and efficient way. Code written for Fiona 1.0 will continue to work: opened collections may manage the global driver registry if no other manager is present. Reading Multilayer data ----------------------- Collections can also be made from single layers within multilayer files or directories of data. The target layer is specified by name or by its integer index within the file or directory. The ``fiona.listlayers()`` function provides an index ordered list of layer names. .. 
code-block:: python with fiona.drivers(): for layername in fiona.listlayers('tests/data'): with fiona.open('tests/data', layer=layername) as src: print(layername, len(src)) # Output: # (u'coutwildrnp', 67) Layer can also be specified by index. In this case, ``layer=0`` and ``layer='test_uk'`` specify the same layer in the data file or directory. .. code-block:: python with fiona.drivers(): for i, layername in enumerate(fiona.listlayers('tests/data')): with fiona.open('tests/data', layer=i) as src: print(i, layername, len(src)) # Output: # (0, u'coutwildrnp', 67) Writing Multilayer data ----------------------- Multilayer data can be written as well. Layers must be specified by name when writing. .. code-block:: python with fiona.drivers(): with open('tests/data/cowildrnp.shp') as src: meta = src.meta f = next(src) with fiona.open('/tmp/foo', 'w', layer='bar', **meta) as dst: dst.write(f) print(fiona.listlayers('/tmp/foo')) with fiona.open('/tmp/foo', layer='bar') as src: print(len(src)) f = next(src) print(f['geometry']['type']) print(f['properties']) # Output: # [u'bar'] # 1 # Polygon # OrderedDict([(u'PERIMETER', 1.22107), (u'FEATURE2', None), (u'NAME', u'Mount Naomi Wilderness'), (u'FEATURE1', u'Wilderness'), (u'URL', u'http://www.wilderness.net/index.cfm?fuse=NWPS&sec=wildView&wname=Mount%20Naomi'), (u'AGBUR', u'FS'), (u'AREA', 0.0179264), (u'STATE_FIPS', u'49'), (u'WILDRNP020', 332), (u'STATE', u'UT')]) A view of the /tmp/foo directory will confirm the creation of the new files. .. code-block:: console $ ls /tmp/foo bar.cpg bar.dbf bar.prj bar.shp bar.shx Collections from archives and virtual file systems -------------------------------------------------- Zip and Tar archives can be treated as virtual filesystems and Collections can be made from paths and layers within them. In other words, Fiona lets you read and write zipped Shapefiles. .. 
code-block:: python with fiona.drivers(): for i, layername in enumerate( fiona.listlayers( '/', vfs='zip://tests/data/coutwildrnp.zip')): with fiona.open( '/', vfs='zip://tests/data/coutwildrnp.zip', layer=i) as src: print(i, layername, len(src)) # Output: # (0, u'coutwildrnp', 67) Fiona CLI ========= Fiona's command line interface, named "fio", is documented at `docs/cli.rst `__. Its ``fio info`` pretty prints information about a data file. .. code-block:: console $ fio info --indent 2 tests/data/coutwildrnp.shp { "count": 67, "crs": "EPSG:4326", "driver": "ESRI Shapefile", "bounds": [ -113.56424713134766, 37.0689811706543, -104.97087097167969, 41.99627685546875 ], "schema": { "geometry": "Polygon", "properties": { "PERIMETER": "float:24.15", "FEATURE2": "str:80", "NAME": "str:80", "FEATURE1": "str:80", "URL": "str:101", "AGBUR": "str:80", "AREA": "float:24.15", "STATE_FIPS": "str:80", "WILDRNP020": "int:10", "STATE": "str:80" } } } Installation ============ Fiona requires Python 2.6, 2.7, 3.3, or 3.4 and GDAL/OGR 1.8+. To build from a source distribution you will need a C compiler and GDAL and Python development headers and libraries (libgdal1-dev for Debian/Ubuntu, gdal-dev for CentOS/Fedora). To build from a repository copy, you will also need Cython to build C sources from the project's .pyx files. See the project's requirements-dev.txt file for guidance. The `Kyngchaos GDAL frameworks `__ will satisfy the GDAL/OGR dependency for OS X, as will Homebrew's GDAL Formula (``brew install gdal``). Python Requirements ------------------- Fiona depends on the modules ``six``, ``cligj``, ``munch``, ``argparse``, and ``ordereddict`` (the two latter modules are standard in Python 2.7+). Pip will fetch these requirements for you, but users installing Fiona from a Windows installer must get them separately. 
Unix-like systems ----------------- Assuming you're using a virtualenv (if not, skip to the 4th command) and GDAL/OGR libraries, headers, and `gdal-config`_ program are installed to well known locations on your system via your system's package manager (``brew install gdal`` using Homebrew on OS X), installation is this simple. .. code-block:: console $ mkdir fiona_env $ virtualenv fiona_env $ source fiona_env/bin/activate (fiona_env)$ pip install fiona If gdal-config is not available or if GDAL/OGR headers and libs aren't installed to a well known location, you must set include dirs, library dirs, and libraries options via the setup.cfg file or setup command line as shown below (using ``git``). You must also specify the major version of the GDAL API (1 or 2) on the setup command line. .. code-block:: console (fiona_env)$ git clone git://github.com/Toblerity/Fiona.git (fiona_env)$ cd Fiona (fiona_env)$ python setup.py build_ext -I/path/to/gdal/include -L/path/to/gdal/lib -lgdal install --gdalversion 1 Or specify that build options and GDAL API version should be provided by a particular gdal-config program. .. code-block:: console (fiona_env)$ GDAL_CONFIG=/path/to/gdal-config pip install fiona Windows ------- Binary installers are available at http://www.lfd.uci.edu/~gohlke/pythonlibs/#fiona and coming eventually to PyPI. You can download a binary distribution of GDAL from `here `_. You will also need to download the compiled libraries and headers (include files). When building from source on Windows, it is important to know that setup.py cannot rely on gdal-config, which is only present on UNIX systems, to discover the locations of header files and libraries that Fiona needs to compile its C extensions. On Windows, these paths need to be provided by the user. You will need to find the include files and the library files for gdal and use setup.py as follows. You must also specify the major version of the GDAL API (1 or 2) on the setup command line. .. 
code-block:: console $ python setup.py build_ext -I -lgdal_i -L install --gdalversion 1 Note: The GDAL dll (gdal111.dll) and gdal-data directory need to be in your Windows PATH otherwise Fiona will fail to work. Development and testing ======================= Building from the source requires Cython. Tests require Nose. If the GDAL/OGR libraries, headers, and `gdal-config`_ program are installed to well known locations on your system (via your system's package manager), you can do this:: (fiona_env)$ git clone git://github.com/Toblerity/Fiona.git (fiona_env)$ cd Fiona (fiona_env)$ pip install cython (fiona_env)$ pip install -e .[test] (fiona_env)$ nosetests Or you can use the ``pep-518-install`` script:: (fiona_env)$ git clone git://github.com/Toblerity/Fiona.git (fiona_env)$ cd Fiona (fiona_env)$ ./pep-518-install If you have a non-standard environment, you'll need to specify the include and lib dirs and GDAL library on the command line:: (fiona_env)$ python setup.py build_ext -I/path/to/gdal/include -L/path/to/gdal/lib -lgdal --gdalversion 2 develop (fiona_env)$ nosetests .. _OGR: http://www.gdal.org/ogr .. _pyproj: http://pypi.python.org/pypi/pyproj/ .. _Rtree: http://pypi.python.org/pypi/Rtree/ .. _Shapely: http://pypi.python.org/pypi/Shapely/ .. 
_gdal-config: http://www.gdal.org/gdal-config.html Fiona-1.7.10/appveyor.yml000066400000000000000000000072441317446052300152400ustar00rootroot00000000000000# Based on appveyor.yml from https://github.com/PDAL/PDAL and https://github.com/ogrisel/python-appveyor-demo # platform: x64 environment: global: # SDK v7.0 MSVC Express 2008's SetEnv.cmd script will fail if the # /E:ON and /V:ON options are not enabled in the batch script intepreter # See: http://stackoverflow.com/a/13751649/163740 CMD_IN_ENV: "cmd /E:ON /V:ON /C .\\appveyor\\run_with_env.cmd" GDAL_HOME: "C:\\gdal" matrix: # - PYTHON: "C:\\Python27.10-x64" # PYTHON_VERSION: "2.7.10" # PYTHON_ARCH: "64" - PYTHON: "C:\\Python34-x64" PYTHON_VERSION: "3.4.3" PYTHON_ARCH: "64" install: - ECHO "Filesystem root:" - ps: "ls \"C:/\"" - ECHO "Installed SDKs:" - ps: "ls \"C:/Program Files/Microsoft SDKs/Windows\"" # Install Python (from the official .msi of http://python.org) and pip when # not already installed. # - ps: if (-not(Test-Path($env:PYTHON))) { & appveyor\install.ps1 } # Prepend newly installed Python to the PATH of this build (this cannot be # done from inside the powershell script as it would require to restart # the parent CMD process). 
- "SET PATH=%PYTHON%;%PYTHON%\\Scripts;%PATH%" - "SET PYTHONPATH=%PYTHON%\\Lib\\site-packages;%PYTHONPATH%" # Check that we have the expected version and architecture for Python - "python --version" - "python -c \"import struct; print(struct.calcsize('P') * 8)\"" # https://code.google.com/p/pymat2/wiki/WindowsTips #- ps: (Get-Content "%VS90COMNTOOLS%\..\..\VC\vcvarsall.bat) | ForEach-Object { $_ -replace "vcvarsamd64.bat", "vcvars64.bat" } | Set-Content "%VS90COMNTOOLS%\..\..\VC\vcvarsall.bat # - '%CMD_IN_ENV% echo "conv env"' # - ps: 'ls "C:\Program Files (x86)\Microsoft Visual Studio 9.0\VC\bin"' # - if "%platform%" == "x86" call "%VS90COMNTOOLS%\..\..\VC\vcvarsall.bat" #- if "%platform%" == "x64" echo f | xcopy /f /y "C:\Program Files (x86)\Microsoft Visual Studio 9.0\VC\bin\vcvars64.bat" "C:\Program Files (x86)\Microsoft Visual Studio 9.0\VC\bin\amd64\vcvarsamd64.bat" # - if "%platform%" == "x64" call "%VS90COMNTOOLS%\..\..\VC\vcvarsall.bat" amd64 # - if "%platform%" == "x64" set WIN64_ARG="WIN64=YES" - ps: mkdir C:\build | out-null - ps: mkdir C:\gdal | out-null - curl http://download.gisinternals.com/sdk/downloads/release-1600-x64-gdal-1-11-4-mapserver-6-4-3.zip --output gdalbin.zip - 7z x gdalbin.zip -oC:\gdal - curl http://download.gisinternals.com/sdk/downloads/release-1600-x64-gdal-1-11-4-mapserver-6-4-3-libs.zip --output gdallibs.zip - 7z x gdallibs.zip -oC:\gdal - "SET PATH=C:\\gdal;C:\\gdal\\bin;C:\\gdal\\data;C:\\gdal\\bin\\gdal\\apps;%PATH%" - "SET GDAL_DATA=C:\\gdal\\bin\\gdal-data" - ECHO "Filesystem C:/GDAL:" - ps: "ls \"C:/GDAL\"" - cd C:\projects\fiona # Upgrade to the latest version of pip to avoid it displaying warnings # about it being out of date. # - "python -m pip install --disable-pip-version-check --user --upgrade pip" - pip --version # Install the build dependencies of the project. 
If some dependencies contain # compiled extensions and are not provided as pre-built wheel packages, # pip will build them from source using the MSVC compiler matching the # target Python version and architecture - "%CMD_IN_ENV% pip install -r requirements-dev.txt" build_script: # Build the compiled extension - cmd: echo %PATH% - cmd: echo %PYTHONPATH% - "%CMD_IN_ENV% python setup.py build_ext -IC:\\gdal\\include -lgdal_i -LC:\\gdal\\lib install --gdalversion 1.11.4" test_script: # Run the project tests - cmd: SET - "%CMD_IN_ENV% nosetests --exclude test_filter_vsi --exclude test_geopackage" Fiona-1.7.10/appveyor/000077500000000000000000000000001317446052300145065ustar00rootroot00000000000000Fiona-1.7.10/appveyor/install.ps1000066400000000000000000000160331317446052300166040ustar00rootroot00000000000000# Sample script to install Python and pip under Windows # Authors: Olivier Grisel, Jonathan Helmus, Kyle Kastner, and Alex Willmer # License: CC0 1.0 Universal: http://creativecommons.org/publicdomain/zero/1.0/ $MINICONDA_URL = "http://repo.continuum.io/miniconda/" $BASE_URL = "https://www.python.org/ftp/python/" $GET_PIP_URL = "https://bootstrap.pypa.io/get-pip.py" $GET_PIP_PATH = "C:\get-pip.py" $PYTHON_PRERELEASE_REGEX = @" (?x) (?\d+) \. (?\d+) \. (?\d+) (?[a-z]{1,2}\d+) "@ function Download ($filename, $url) { $webclient = New-Object System.Net.WebClient $basedir = $pwd.Path + "\" $filepath = $basedir + $filename if (Test-Path $filename) { Write-Host "Reusing" $filepath return $filepath } # Download and retry up to 3 times in case of network transient errors. 
Write-Host "Downloading" $filename "from" $url $retry_attempts = 2 for ($i = 0; $i -lt $retry_attempts; $i++) { try { $webclient.DownloadFile($url, $filepath) break } Catch [Exception]{ Start-Sleep 1 } } if (Test-Path $filepath) { Write-Host "File saved at" $filepath } else { # Retry once to get the error message if any at the last try $webclient.DownloadFile($url, $filepath) } return $filepath } function ParsePythonVersion ($python_version) { if ($python_version -match $PYTHON_PRERELEASE_REGEX) { return ([int]$matches.major, [int]$matches.minor, [int]$matches.micro, $matches.prerelease) } $version_obj = [version]$python_version return ($version_obj.major, $version_obj.minor, $version_obj.build, "") } function DownloadPython ($python_version, $platform_suffix) { $major, $minor, $micro, $prerelease = ParsePythonVersion $python_version if (($major -le 2 -and $micro -eq 0) ` -or ($major -eq 3 -and $minor -le 2 -and $micro -eq 0) ` ) { $dir = "$major.$minor" $python_version = "$major.$minor$prerelease" } else { $dir = "$major.$minor.$micro" } if ($prerelease) { if (($major -le 2) ` -or ($major -eq 3 -and $minor -eq 1) ` -or ($major -eq 3 -and $minor -eq 2) ` -or ($major -eq 3 -and $minor -eq 3) ` ) { $dir = "$dir/prev" } } if (($major -le 2) -or ($major -le 3 -and $minor -le 4)) { $ext = "msi" if ($platform_suffix) { $platform_suffix = ".$platform_suffix" } } else { $ext = "exe" if ($platform_suffix) { $platform_suffix = "-$platform_suffix" } } $filename = "python-$python_version$platform_suffix.$ext" $url = "$BASE_URL$dir/$filename" $filepath = Download $filename $url return $filepath } function InstallPython ($python_version, $architecture, $python_home) { Write-Host "Installing Python" $python_version "for" $architecture "bit architecture to" $python_home if (Test-Path $python_home) { Write-Host $python_home "already exists, skipping." 
return $false } if ($architecture -eq "32") { $platform_suffix = "" } else { $platform_suffix = "amd64" } $installer_path = DownloadPython $python_version $platform_suffix $installer_ext = [System.IO.Path]::GetExtension($installer_path) Write-Host "Installing $installer_path to $python_home" $install_log = $python_home + ".log" if ($installer_ext -eq '.msi') { InstallPythonMSI $installer_path $python_home $install_log } else { InstallPythonEXE $installer_path $python_home $install_log } if (Test-Path $python_home) { Write-Host "Python $python_version ($architecture) installation complete" } else { Write-Host "Failed to install Python in $python_home" Get-Content -Path $install_log Exit 1 } } function InstallPythonEXE ($exepath, $python_home, $install_log) { $install_args = "/quiet InstallAllUsers=1 TargetDir=$python_home" RunCommand $exepath $install_args } function InstallPythonMSI ($msipath, $python_home, $install_log) { $install_args = "/qn /log $install_log /i $msipath TARGETDIR=$python_home" $uninstall_args = "/qn /x $msipath" RunCommand "msiexec.exe" $install_args if (-not(Test-Path $python_home)) { Write-Host "Python seems to be installed else-where, reinstalling." RunCommand "msiexec.exe" $uninstall_args RunCommand "msiexec.exe" $install_args } } function RunCommand ($command, $command_args) { Write-Host $command $command_args Start-Process -FilePath $command -ArgumentList $command_args -Wait -Passthru } function InstallPip ($python_home) { $pip_path = $python_home + "\Scripts\pip.exe" $python_path = $python_home + "\python.exe" if (-not(Test-Path $pip_path)) { Write-Host "Installing pip..." $webclient = New-Object System.Net.WebClient $webclient.DownloadFile($GET_PIP_URL, $GET_PIP_PATH) Write-Host "Executing:" $python_path $GET_PIP_PATH & $python_path $GET_PIP_PATH } else { Write-Host "pip already installed." 
} } function DownloadMiniconda ($python_version, $platform_suffix) { if ($python_version -eq "3.4") { $filename = "Miniconda3-3.5.5-Windows-" + $platform_suffix + ".exe" } else { $filename = "Miniconda-3.5.5-Windows-" + $platform_suffix + ".exe" } $url = $MINICONDA_URL + $filename $filepath = Download $filename $url return $filepath } function InstallMiniconda ($python_version, $architecture, $python_home) { Write-Host "Installing Python" $python_version "for" $architecture "bit architecture to" $python_home if (Test-Path $python_home) { Write-Host $python_home "already exists, skipping." return $false } if ($architecture -eq "32") { $platform_suffix = "x86" } else { $platform_suffix = "x86_64" } $filepath = DownloadMiniconda $python_version $platform_suffix Write-Host "Installing" $filepath "to" $python_home $install_log = $python_home + ".log" $args = "/S /D=$python_home" Write-Host $filepath $args Start-Process -FilePath $filepath -ArgumentList $args -Wait -Passthru if (Test-Path $python_home) { Write-Host "Python $python_version ($architecture) installation complete" } else { Write-Host "Failed to install Python in $python_home" Get-Content -Path $install_log Exit 1 } } function InstallMinicondaPip ($python_home) { $pip_path = $python_home + "\Scripts\pip.exe" $conda_path = $python_home + "\Scripts\conda.exe" if (-not(Test-Path $pip_path)) { Write-Host "Installing pip..." $args = "install --yes pip" Write-Host $conda_path $args Start-Process -FilePath "$conda_path" -ArgumentList $args -Wait -Passthru } else { Write-Host "pip already installed." 
} } function main () { InstallPython $env:PYTHON_VERSION $env:PYTHON_ARCH $env:PYTHON InstallPip $env:PYTHON } main Fiona-1.7.10/appveyor/run_with_env.cmd000066400000000000000000000064461317446052300177140ustar00rootroot00000000000000:: To build extensions for 64 bit Python 3, we need to configure environment :: variables to use the MSVC 2010 C++ compilers from GRMSDKX_EN_DVD.iso of: :: MS Windows SDK for Windows 7 and .NET Framework 4 (SDK v7.1) :: :: To build extensions for 64 bit Python 2, we need to configure environment :: variables to use the MSVC 2008 C++ compilers from GRMSDKX_EN_DVD.iso of: :: MS Windows SDK for Windows 7 and .NET Framework 3.5 (SDK v7.0) :: :: 32 bit builds, and 64-bit builds for 3.5 and beyond, do not require specific :: environment configurations. :: :: Note: this script needs to be run with the /E:ON and /V:ON flags for the :: cmd interpreter, at least for (SDK v7.0) :: :: More details at: :: https://github.com/cython/cython/wiki/64BitCythonExtensionsOnWindows :: http://stackoverflow.com/a/13751649/163740 :: :: Author: Olivier Grisel :: License: CC0 1.0 Universal: http://creativecommons.org/publicdomain/zero/1.0/ :: :: Notes about batch files for Python people: :: :: Quotes in values are literally part of the values: :: SET FOO="bar" :: FOO is now five characters long: " b a r " :: If you don't want quotes, don't include them on the right-hand side. :: :: The CALL lines at the end of this file look redundant, but if you move them :: outside of the IF clauses, they do not run properly in the SET_SDK_64==Y :: case, I don't know why. @ECHO OFF SET COMMAND_TO_RUN=%* SET WIN_SDK_ROOT=C:\Program Files\Microsoft SDKs\Windows SET WIN_WDK=c:\Program Files (x86)\Windows Kits\10\Include\wdf :: Extract the major and minor versions, and allow for the minor version to be :: more than 9. This requires the version number to have two dots in it. SET MAJOR_PYTHON_VERSION=%PYTHON_VERSION:~0,1% IF "%PYTHON_VERSION:~3,1%" == "." 
( SET MINOR_PYTHON_VERSION=%PYTHON_VERSION:~2,1% ) ELSE ( SET MINOR_PYTHON_VERSION=%PYTHON_VERSION:~2,2% ) :: Based on the Python version, determine what SDK version to use, and whether :: to set the SDK for 64-bit. IF %MAJOR_PYTHON_VERSION% == 2 ( SET WINDOWS_SDK_VERSION="v7.0" SET SET_SDK_64=Y ) ELSE ( IF %MAJOR_PYTHON_VERSION% == 3 ( SET WINDOWS_SDK_VERSION="v7.1" IF %MINOR_PYTHON_VERSION% LEQ 4 ( SET SET_SDK_64=Y ) ELSE ( SET SET_SDK_64=N IF EXIST "%WIN_WDK%" ( :: See: https://connect.microsoft.com/VisualStudio/feedback/details/1610302/ REN "%WIN_WDK%" 0wdf ) ) ) ELSE ( ECHO Unsupported Python version: "%MAJOR_PYTHON_VERSION%" EXIT 1 ) ) IF %PYTHON_ARCH% == 64 ( IF %SET_SDK_64% == Y ( ECHO Configuring Windows SDK %WINDOWS_SDK_VERSION% for Python %MAJOR_PYTHON_VERSION% on a 64 bit architecture SET DISTUTILS_USE_SDK=1 SET MSSdk=1 "%WIN_SDK_ROOT%\%WINDOWS_SDK_VERSION%\Setup\WindowsSdkVer.exe" -q -version:%WINDOWS_SDK_VERSION% "%WIN_SDK_ROOT%\%WINDOWS_SDK_VERSION%\Bin\SetEnv.cmd" /x64 /release ECHO Executing: %COMMAND_TO_RUN% call %COMMAND_TO_RUN% || EXIT 1 ) ELSE ( ECHO Using default MSVC build environment for 64 bit architecture ECHO Executing: %COMMAND_TO_RUN% call %COMMAND_TO_RUN% || EXIT 1 ) ) ELSE ( ECHO Using default MSVC build environment for 32 bit architecture ECHO Executing: %COMMAND_TO_RUN% call %COMMAND_TO_RUN% || EXIT 1 ) Fiona-1.7.10/benchmark-max.py000066400000000000000000000022741317446052300157350ustar00rootroot00000000000000 import timeit from fiona import collection from osgeo import ogr PATH = 'docs/data/test_uk.shp' NAME = 'test_uk' # Fiona s = """ with collection(PATH, "r") as c: for f in c: id = f["id"] """ t = timeit.Timer( stmt=s, setup='from __main__ import collection, PATH, NAME' ) print "Fiona 0.5" print "%.2f usec/pass" % (1000000 * t.timeit(number=1000)/1000) print # OGR s = """ source = ogr.Open(PATH) layer = source.GetLayerByName(NAME) schema = [] ldefn = layer.GetLayerDefn() for n in range(ldefn.GetFieldCount()): fdefn = 
ldefn.GetFieldDefn(n) schema.append((fdefn.name, fdefn.type)) for feature in layer: id = feature.GetFID() props = {} for i in range(feature.GetFieldCount()): props[schema[i][0]] = feature.GetField(i) coordinates = [] for part in feature.GetGeometryRef(): ring = [] for i in range(part.GetPointCount()): xy = part.GetPoint(i) ring.append(xy) coordinates.append(ring) source.Destroy() """ print "osgeo.ogr 1.7.2 (maximum)" t = timeit.Timer( stmt=s, setup='from __main__ import ogr, PATH, NAME' ) print "%.2f usec/pass" % (1000000 * t.timeit(number=1000)/1000) Fiona-1.7.10/benchmark-min.py000066400000000000000000000013121317446052300157230ustar00rootroot00000000000000 import timeit from fiona import collection from osgeo import ogr PATH = 'docs/data/test_uk.shp' NAME = 'test_uk' # Fiona s = """ with collection(PATH, "r") as c: for f in c: id = f["id"] """ t = timeit.Timer( stmt=s, setup='from __main__ import collection, PATH, NAME' ) print "Fiona 0.5" print "%.2f usec/pass" % (1000000 * t.timeit(number=1000)/1000) print # OGR s = """ source = ogr.Open(PATH) layer = source.GetLayerByName(NAME) for feature in layer: id = feature.GetFID() source.Destroy() """ print "osgeo.ogr 1.7.2 (minimum)" t = timeit.Timer( stmt=s, setup='from __main__ import ogr, PATH, NAME' ) print "%.2f usec/pass" % (1000000 * t.timeit(number=1000)/1000) Fiona-1.7.10/benchmark.py000066400000000000000000000021311317446052300151420ustar00rootroot00000000000000 import timeit from fiona import collection from osgeo import ogr PATH = 'docs/data/test_uk.shp' NAME = 'test_uk' # Fiona s = """ with collection(PATH, "r") as c: for f in c: id = f["id"] """ t = timeit.Timer( stmt=s, setup='from __main__ import collection, PATH, NAME' ) print "Fiona 0.5" print "%.2f usec/pass" % (1000000 * t.timeit(number=1000)/1000) print # OGR s = """ source = ogr.Open(PATH) layer = source.GetLayerByName(NAME) schema = [] ldefn = layer.GetLayerDefn() for n in range(ldefn.GetFieldCount()): fdefn = ldefn.GetFieldDefn(n) 
schema.append((fdefn.name, fdefn.type)) layer.ResetReading() while 1: feature = layer.GetNextFeature() if not feature: break id = feature.GetFID() props = {} for i in range(feature.GetFieldCount()): props[schema[i][0]] = feature.GetField(i) geometry = feature.GetGeometryRef() feature.Destroy() source.Destroy() """ print "osgeo.ogr 1.7.2" t = timeit.Timer( stmt=s, setup='from __main__ import ogr, PATH, NAME' ) print "%.2f usec/pass" % (1000000 * t.timeit(number=1000)/1000) Fiona-1.7.10/docs/000077500000000000000000000000001317446052300135715ustar00rootroot00000000000000Fiona-1.7.10/docs/Makefile000066400000000000000000000130621317446052300152330ustar00rootroot00000000000000# Makefile for Sphinx documentation # # You can set these variables from the command line. SPHINXOPTS = SPHINXBUILD = sphinx-build PAPER = BUILDDIR = _build # Internal variables. PAPEROPT_a4 = -D latex_paper_size=a4 PAPEROPT_letter = -D latex_paper_size=letter ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . # the i18n builder cannot share the environment and doctrees with the others I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 
.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext help: @echo "Please use \`make ' where is one of" @echo " html to make standalone HTML files" @echo " dirhtml to make HTML files named index.html in directories" @echo " singlehtml to make a single large HTML file" @echo " pickle to make pickle files" @echo " json to make JSON files" @echo " htmlhelp to make HTML files and a HTML help project" @echo " qthelp to make HTML files and a qthelp project" @echo " devhelp to make HTML files and a Devhelp project" @echo " epub to make an epub" @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" @echo " latexpdf to make LaTeX files and run them through pdflatex" @echo " text to make text files" @echo " man to make manual pages" @echo " texinfo to make Texinfo files" @echo " info to make Texinfo files and run them through makeinfo" @echo " gettext to make PO message catalogs" @echo " changes to make an overview of all changed/added/deprecated items" @echo " linkcheck to check all external links for integrity" @echo " doctest to run all doctests embedded in the documentation (if enabled)" clean: -rm -rf $(BUILDDIR)/* html: apidocs $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html @echo @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." dirhtml: $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml @echo @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." singlehtml: $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml @echo @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." pickle: $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle @echo @echo "Build finished; now you can process the pickle files." json: $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json @echo @echo "Build finished; now you can process the JSON files." 
htmlhelp: $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp @echo @echo "Build finished; now you can run HTML Help Workshop with the" \ ".hhp project file in $(BUILDDIR)/htmlhelp." qthelp: $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp @echo @echo "Build finished; now you can run "qcollectiongenerator" with the" \ ".qhcp project file in $(BUILDDIR)/qthelp, like this:" @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/Fiona.qhcp" @echo "To view the help file:" @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/Fiona.qhc" devhelp: $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp @echo @echo "Build finished." @echo "To view the help file:" @echo "# mkdir -p $$HOME/.local/share/devhelp/Fiona" @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/Fiona" @echo "# devhelp" epub: $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub @echo @echo "Build finished. The epub file is in $(BUILDDIR)/epub." latex: $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex @echo @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." @echo "Run \`make' in that directory to run these through (pdf)latex" \ "(use \`make latexpdf' here to do that automatically)." latexpdf: $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex @echo "Running LaTeX files through pdflatex..." $(MAKE) -C $(BUILDDIR)/latex all-pdf @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." text: $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text @echo @echo "Build finished. The text files are in $(BUILDDIR)/text." man: $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man @echo @echo "Build finished. The manual pages are in $(BUILDDIR)/man." texinfo: $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo @echo @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." @echo "Run \`make' in that directory to run these through makeinfo" \ "(use \`make info' here to do that automatically)." 
info: $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo @echo "Running Texinfo files through makeinfo..." make -C $(BUILDDIR)/texinfo info @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." gettext: $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale @echo @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." changes: $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes @echo @echo "The overview file is in $(BUILDDIR)/changes." linkcheck: $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck @echo @echo "Link check complete; look for any errors in the above output " \ "or in $(BUILDDIR)/linkcheck/output.txt." doctest: $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest @echo "Testing of doctests in the sources finished, look at the " \ "results in $(BUILDDIR)/doctest/output.txt." apidocs: sphinx-apidoc -f -o . ../fiona @echo @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." Fiona-1.7.10/docs/README.rst000066400000000000000000000001251317446052300152560ustar00rootroot00000000000000.. include:: ../README.rst .. include:: ../CHANGES.txt .. include:: ../CREDITS.txt Fiona-1.7.10/docs/cli.rst000066400000000000000000000216461317446052300151030ustar00rootroot00000000000000Command Line Interface ====================== Fiona's new command line interface is a program named "fio". .. code-block:: console Usage: fio [OPTIONS] COMMAND [ARGS]... Fiona command line interface. Options: -v, --verbose Increase verbosity. -q, --quiet Decrease verbosity. --version Show the version and exit. --help Show this message and exit. Commands: bounds Print the extent of GeoJSON objects buffer Buffer geometries on all sides by a fixed distance. cat Concatenate and print the features of datasets collect Collect a sequence of features. distrib Distribute features from a collection dump Dump a dataset to GeoJSON. env Print information about the fio environment. 
filter Filter GeoJSON features by python expression info Print information about a dataset. insp Open a dataset and start an interpreter. load Load GeoJSON to a dataset in another format. It is developed using the ``click`` package and is new in 1.1.6. bounds ------ New in 1.4.5. Fio-bounds reads LF or RS-delimited GeoJSON texts, either features or collections, from stdin and prints their bounds with or without other data to stdout. With no options, it works like this: .. code-block:: console $ fio cat docs/data/test_uk.shp | head -n 1 \ > | fio bounds [0.735, 51.357216, 0.947778, 51.444717] Using ``--with-id`` gives you .. code-block:: console $ fio cat docs/data/test_uk.shp | head -n 1 \ > | fio bounds --with-id {"id": "0", "bbox": [0.735, 51.357216, 0.947778, 51.444717]} cat --- The cat command concatenates the features of one or more datasets and prints them as a `JSON text sequence `__ of features. In other words: GeoJSON feature objects, possibly pretty printed, optionally separated by ASCII RS (\x1e) chars using `--rs`. The output of ``fio cat`` can be piped to ``fio load`` to create new concatenated datasets. .. code-block:: console $ fio cat docs/data/test_uk.shp docs/data/test_uk.shp \ > | fio load /tmp/double.shp --driver Shapefile $ fio info /tmp/double.shp --count 96 $ fio info docs/data/test_uk.shp --count 48 New in 1.4.0. collect ------- The collect command takes a JSON text sequence of GeoJSON feature objects, such as the output of ``fio cat`` and writes a GeoJSON feature collection. .. code-block:: console $ fio cat docs/data/test_uk.shp docs/data/test_uk.shp \ > | fio collect > /tmp/collected.json $ fio info /tmp/collected.json --count 96 New in 1.4.0. distrib ------- The inverse of fio-collect, fio-distrib takes a GeoJSON feature collection and writes a JSON text sequence of GeoJSON feature objects. .. 
code-block:: console $ fio info --count tests/data/coutwildrnp.shp 67 $ fio cat tests/data/coutwildrnp.shp | fio collect | fio distrib | wc -l 67 New in 1.4.0. dump ---- The dump command reads a vector dataset and writes a GeoJSON feature collection to stdout. Its output can be piped to ``rio load`` (see below). .. code-block:: console $ fio dump docs/data/test_uk.shp --indent 2 --precision 2 | head { "features": [ { "geometry": { "coordinates": [ [ [ 0.9, 51.36 ], You can optionally dump out JSON text sequences using ``--x-json-seq``. Since version 1.4.0, ``fio cat`` is the better tool for generating sequences. .. code-block:: console $ fio dump docs/data/test_uk.shp --precision 2 --x-json-seq | head -n 2 {"geometry": {"coordinates": [[[0.9, 51.36], [0.89, 51.36], [0.79, 51.37], [0.78, 51.37], [0.77, 51.38], [0.76, 51.38], [0.75, 51.39], [0.74, 51.4], [0.73, 51.41], [0.74, 51.43], [0.75, 51.44], [0.76, 51.44], [0.79, 51.44], [0.89, 51.42], [0.9, 51.42], [0.91, 51.42], [0.93, 51.4], [0.94, 51.39], [0.94, 51.38], [0.95, 51.38], [0.95, 51.37], [0.95, 51.37], [0.94, 51.37], [0.9, 51.36], [0.9, 51.36]]], "type": "Polygon"}, "id": "0", "properties": {"AREA": 244820.0, "CAT": 232.0, "CNTRY_NAME": "United Kingdom", "FIPS_CNTRY": "UK", "POP_CNTRY": 60270708.0}, "type": "Feature"} {"geometry": {"coordinates": [[[-4.66, 51.16], [-4.67, 51.16], [-4.67, 51.16], [-4.67, 51.17], [-4.67, 51.19], [-4.67, 51.19], [-4.67, 51.2], [-4.66, 51.2], [-4.66, 51.19], [-4.65, 51.16], [-4.65, 51.16], [-4.65, 51.16], [-4.66, 51.16]]], "type": "Polygon"}, "id": "1", "properties": {"AREA": 244820.0, "CAT": 232.0, "CNTRY_NAME": "United Kingdom", "FIPS_CNTRY": "UK", "POP_CNTRY": 60270708.0}, "type": "Feature"} info ---- The info command prints information about a dataset as a JSON object. .. 
code-block:: console $ fio info docs/data/test_uk.shp --indent 2 { "count": 48, "crs": "+datum=WGS84 +no_defs +proj=longlat", "driver": "ESRI Shapefile", "bounds": [ -8.621389, 49.911659, 1.749444, 60.844444 ], "schema": { "geometry": "Polygon", "properties": { "CAT": "float:16", "FIPS_CNTRY": "str:80", "CNTRY_NAME": "str:80", "AREA": "float:15.2", "POP_CNTRY": "float:15.2" } } } You can process this JSON using, e.g., `underscore-cli `__. .. code-block:: console $ fio info docs/data/test_uk.shp | underscore extract count 48 You can also optionally get single info items as plain text (not JSON) strings .. code-block:: console $ fio info docs/data/test_uk.shp --count 48 $ fio info docs/data/test_uk.shp --bounds -8.621389 49.911659 1.749444 60.844444 load ---- The load command reads GeoJSON features from stdin and writes them to a vector dataset using another format. .. code-block:: console $ fio dump docs/data/test_uk.shp \ > | fio load /tmp/test.shp --driver Shapefile This command also supports GeoJSON text sequences. RS-separated sequences will be detected. If you want to load LF-separated sequences, you must specfiy ``--x-json-seq``. .. code-block:: console $ fio cat docs/data/test_uk.shp | fio load /tmp/foo.shp --driver Shapefile $ fio info /tmp/foo.shp --indent 2 { "count": 48, "crs": "+datum=WGS84 +no_defs +proj=longlat", "driver": "ESRI Shapefile", "bounds": [ -8.621389, 49.911659, 1.749444, 60.844444 ], "schema": { "geometry": "Polygon", "properties": { "AREA": "float:24.15", "CNTRY_NAME": "str:80", "POP_CNTRY": "float:24.15", "FIPS_CNTRY": "str:80", "CAT": "float:24.15" } } } The underscore-cli process command is another way of turning a GeoJSON feature collection into a feature sequence. .. 
code-block:: console $ fio dump docs/data/test_uk.shp \ > | underscore process \ > 'each(data.features,function(o){console.log(JSON.stringify(o))})' \ > | fio load /tmp/test-seq.shp --x-json-seq --driver Shapefile filter ------ The filter command reads GeoJSON features from stdin and writes the feature to stdout *if* the provided expression evalutates to `True` for that feature. The python expression is evaluated in a restricted namespace containing 3 functions (`sum`, `min`, `max`), the `math` module, the shapely `shape` function, and an object `f` representing the feature to be evaluated. This `f` object allows access in javascript-style dot notation for convenience. If the expression evaluates to a "truthy" value, the feature is printed verbatim. Otherwise, the feature is excluded from the output. For example fio cat data.shp \ | fio filter "f.properties.area > 1000.0" \ | fio collect > large_polygons.geojson Would create a geojson file with only those features from `data.shp` where the area was over a given threshold. Coordinate Reference System Transformations ------------------------------------------- The ``fio cat`` command can optionally transform feature geometries to a new coordinate reference system specified with ``--dst_crs``. The ``fio collect`` command can optionally transform from a coordinate reference system specified with ``--src_crs`` to the default WGS84 GeoJSON CRS. Like collect, ``fio load`` can accept non-WGS84 features, but as it can write files in formats other than GeoJSON, you can optionally specify a ``--dst_crs``. For example, the WGS84 features read from docs/data/test_uk.shp, .. code-block:: console $ fio cat docs/data/test_uk.shp --dst_crs EPSG:3857 \ > | fio collect --src_crs EPSG:3857 > /tmp/foo.json make a detour through EPSG:3857 (Web Mercator) and are transformed back to WGS84 by fio cat. The following, .. 
code-block:: console $ fio cat docs/data/test_uk.shp --dst_crs EPSG:3857 \ > | fio load --src_crs EPSG:3857 --dst_crs EPSG:4326 --driver Shapefile \ > /tmp/foo.shp does the same thing, but for ESRI Shapefile output. New in 1.4.2. Fiona-1.7.10/docs/conf.py000066400000000000000000000215001317446052300150660ustar00rootroot00000000000000# -*- coding: utf-8 -*- # # Fiona documentation build configuration file, created by # sphinx-quickstart on Mon Dec 26 12:16:26 2011. # # This file is execfile()d with the current directory set to its containing dir. # # Note that not all possible configuration values are present in this # autogenerated file. # # All configuration values have a default; values that are commented out # serve to show the default. import fiona import sys, os # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. #sys.path.insert(0, os.path.abspath('.')) # -- General configuration ----------------------------------------------------- # If your documentation needs a minimal Sphinx version, state it here. #needs_sphinx = '1.0' # Add any Sphinx extension module names here, as strings. They can be extensions # coming with Sphinx (named 'sphinx.ext.*') or your custom ones. extensions = ['sphinx.ext.autodoc'] # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] # The suffix of source filenames. source_suffix = '.rst' # The encoding of source files. #source_encoding = 'utf-8-sig' # The master toctree document. master_doc = 'index' # General information about the project. project = u'Fiona' copyright = u'2011, Sean Gillies' # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. # # The short X.Y version. 
version = fiona.__version__ # The full version, including alpha/beta/rc tags. release = fiona.__version__ # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. #language = None # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: #today = '' # Else, today_fmt is used as the format for a strftime call. #today_fmt = '%B %d, %Y' # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. exclude_patterns = ['_build'] # The reST default role (used for this markup: `text`) to use for all documents. #default_role = None # If true, '()' will be appended to :func: etc. cross-reference text. #add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). #add_module_names = True # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. #show_authors = False # The name of the Pygments (syntax highlighting) style to use. pygments_style = 'sphinx' # A list of ignored prefixes for module index sorting. #modindex_common_prefix = [] # -- Options for HTML output --------------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. #html_theme = 'default' html_theme = 'sphinxdoc' # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. #html_theme_options = {} # Add any paths that contain custom themes here, relative to this directory. #html_theme_path = [] # The name for this set of Sphinx documents. If None, it defaults to # " v documentation". #html_title = None # A shorter title for the navigation bar. Default is the same as html_title. 
#html_short_title = None # The name of an image file (relative to this directory) to place at the top # of the sidebar. #html_logo = None # The name of an image file (within the static path) to use as favicon of the # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 # pixels large. #html_favicon = None # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". html_static_path = ['_static'] # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, # using the given strftime format. #html_last_updated_fmt = '%b %d, %Y' # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. #html_use_smartypants = True # Custom sidebar templates, maps document names to template names. #html_sidebars = {} # Additional templates that should be rendered to pages, maps page names to # template names. #html_additional_pages = {} # If false, no module index is generated. #html_domain_indices = True # If false, no index is generated. #html_use_index = True # If true, the index is split into individual pages for each letter. #html_split_index = False # If true, links to the reST sources are added to the pages. #html_show_sourcelink = True # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. #html_show_sphinx = True # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. #html_show_copyright = True # If true, an OpenSearch description file will be output, and all pages will # contain a tag referring to it. The value of this option must be the # base URL from which the finished HTML is served. #html_use_opensearch = '' # This is the file name suffix for HTML files (e.g. ".xhtml"). #html_file_suffix = None # Output file base name for HTML help builder. 
htmlhelp_basename = 'Fionadoc' # -- Options for LaTeX output -------------------------------------------------- latex_elements = { # The paper size ('letterpaper' or 'a4paper'). #'papersize': 'letterpaper', # The font size ('10pt', '11pt' or '12pt'). #'pointsize': '10pt', # Additional stuff for the LaTeX preamble. #'preamble': '', } # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, author, documentclass [howto/manual]). latex_documents = [ ('index', 'Fiona.tex', u'Fiona Documentation', u'Sean Gillies', 'manual'), ] # The name of an image file (relative to this directory) to place at the top of # the title page. #latex_logo = None # For "manual" documents, if this is true, then toplevel headings are parts, # not chapters. #latex_use_parts = False # If true, show page references after internal links. #latex_show_pagerefs = False # If true, show URL addresses after external links. #latex_show_urls = False # Documents to append as an appendix to all manuals. #latex_appendices = [] # If false, no module index is generated. #latex_domain_indices = True # -- Options for manual page output -------------------------------------------- # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). man_pages = [ ('index', 'fiona', u'Fiona Documentation', [u'Sean Gillies'], 1) ] # If true, show URL addresses after external links. #man_show_urls = False # -- Options for Texinfo output ------------------------------------------------ # Grouping the document tree into Texinfo files. List of tuples # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ ('index', 'Fiona', u'Fiona Documentation', u'Sean Gillies', 'Fiona', 'One line description of project.', 'Miscellaneous'), ] # Documents to append as an appendix to all manuals. #texinfo_appendices = [] # If false, no module index is generated. 
#texinfo_domain_indices = True # How to display URL addresses: 'footnote', 'no', or 'inline'. #texinfo_show_urls = 'footnote' # -- Options for Epub output --------------------------------------------------- # Bibliographic Dublin Core info. epub_title = u'Fiona' epub_author = u'Sean Gillies' epub_publisher = u'Sean Gillies' epub_copyright = u'2011, Sean Gillies' # The language of the text. It defaults to the language option # or en if the language is not set. #epub_language = '' # The scheme of the identifier. Typical schemes are ISBN or URL. #epub_scheme = '' # The unique identifier of the text. This can be a ISBN number # or the project homepage. #epub_identifier = '' # A unique identification for the text. #epub_uid = '' # A tuple containing the cover image and cover page html template filenames. #epub_cover = () # HTML files that should be inserted before the pages created by sphinx. # The format is a list of tuples containing the path and title. #epub_pre_files = [] # HTML files shat should be inserted after the pages created by sphinx. # The format is a list of tuples containing the path and title. #epub_post_files = [] # A list of files that should not be packed into the epub file. #epub_exclude_files = [] # The depth of the table of contents in toc.ncx. #epub_tocdepth = 3 # Allow duplicate toc entries. #epub_tocdup = True Fiona-1.7.10/docs/encoding.txt000066400000000000000000000036251317446052300161260ustar00rootroot00000000000000========================= Fiona and String Encoding ========================= Reading ------- With Fiona, all 'str' type record attributes are unicode strings. The source data is encoded in some way. It might be a standard encoding (ISO-8859-1 or UTF-8) or it might be a format-specific encoding. How do we get from encoded strings to Python unicode? :: encoded File | (decode?) OGR (encode?) | (decode) Fiona E_f R E_i The internal encoding `E_i` is used by the ``FeatureBuilder`` class to create Fiona's record dicts. 
`E_f` is the encoding of the data file. `R` is ``True`` if OGR is recoding record attribute values to UTF-8 (a recent feature that isn't implemented for all format drivers, hence the question marks in the sketch above), else ``False``. The value of E_i is determined like this:: E_i = (R and 'utf-8') or E_f In the real world of sloppy data, we may not know the exact encoding of the data file. Fiona's best guess at it is this:: E_f = E_u or (R and E_o) or (S and 'iso-8859-1') or E_p `E_u`, here, is any encoding provided by the programmer (through the ``Collection`` constructor). `E_o` is an encoding detected by OGR (which doesn't provide an API to get the detected encoding). `S` is ``True`` if the file is a Shapefile (because that's the format default). `E_p` is locale.getpreferredencoding(). Bottom line: if you know that your data file has an encoding other than ISO-8859-1, specify it. If you don't know what the encoding is, you can let the format driver try to figure it out (Requires GDAL 1.9.1+). Writing ------- On the writing side:: Fiona (encode) | (decode?) OGR (encode?) | encoded File E_i R E_f We derive `E_i` from `R` and `E_f` again as above. `E_f` is:: E_f = E_u or (S and 'iso-8859-1') or E_p Appending --------- The diagram is the same as above, but `E_f` is as in the Reading section. Fiona-1.7.10/docs/fiona.fio.rst000066400000000000000000000042571317446052300162030ustar00rootroot00000000000000fiona.fio package ================= Submodules ---------- fiona.fio.bounds module ----------------------- .. automodule:: fiona.fio.bounds :members: :undoc-members: :show-inheritance: fiona.fio.calc module --------------------- .. automodule:: fiona.fio.calc :members: :undoc-members: :show-inheritance: fiona.fio.cat module -------------------- .. automodule:: fiona.fio.cat :members: :undoc-members: :show-inheritance: fiona.fio.collect module ------------------------ .. 
automodule:: fiona.fio.collect :members: :undoc-members: :show-inheritance: fiona.fio.distrib module ------------------------ .. automodule:: fiona.fio.distrib :members: :undoc-members: :show-inheritance: fiona.fio.dump module --------------------- .. automodule:: fiona.fio.dump :members: :undoc-members: :show-inheritance: fiona.fio.env module -------------------- .. automodule:: fiona.fio.env :members: :undoc-members: :show-inheritance: fiona.fio.filter module ----------------------- .. automodule:: fiona.fio.filter :members: :undoc-members: :show-inheritance: fiona.fio.helpers module ------------------------ .. automodule:: fiona.fio.helpers :members: :undoc-members: :show-inheritance: fiona.fio.info module --------------------- .. automodule:: fiona.fio.info :members: :undoc-members: :show-inheritance: fiona.fio.insp module --------------------- .. automodule:: fiona.fio.insp :members: :undoc-members: :show-inheritance: fiona.fio.load module --------------------- .. automodule:: fiona.fio.load :members: :undoc-members: :show-inheritance: fiona.fio.ls module ------------------- .. automodule:: fiona.fio.ls :members: :undoc-members: :show-inheritance: fiona.fio.main module --------------------- .. automodule:: fiona.fio.main :members: :undoc-members: :show-inheritance: fiona.fio.options module ------------------------ .. automodule:: fiona.fio.options :members: :undoc-members: :show-inheritance: Module contents --------------- .. automodule:: fiona.fio :members: :undoc-members: :show-inheritance: Fiona-1.7.10/docs/fiona.rst000066400000000000000000000033771317446052300154310ustar00rootroot00000000000000fiona package ============= Subpackages ----------- .. toctree:: fiona.fio Submodules ---------- fiona.collection module ----------------------- .. automodule:: fiona.collection :members: :undoc-members: :show-inheritance: fiona.compat module ------------------- .. 
automodule:: fiona.compat :members: :undoc-members: :show-inheritance: fiona.crs module ---------------- .. automodule:: fiona.crs :members: :undoc-members: :show-inheritance: fiona.drvsupport module ----------------------- .. automodule:: fiona.drvsupport :members: :undoc-members: :show-inheritance: fiona.errors module ------------------- .. automodule:: fiona.errors :members: :undoc-members: :show-inheritance: fiona.inspector module ---------------------- .. automodule:: fiona.inspector :members: :undoc-members: :show-inheritance: fiona.ogrext module ------------------- .. automodule:: fiona.ogrext :members: :undoc-members: :show-inheritance: fiona.ogrext1 module -------------------- .. automodule:: fiona.ogrext1 :members: :undoc-members: :show-inheritance: fiona.ogrext2 module -------------------- .. automodule:: fiona.ogrext2 :members: :undoc-members: :show-inheritance: fiona.rfc3339 module -------------------- .. automodule:: fiona.rfc3339 :members: :undoc-members: :show-inheritance: fiona.tool module ----------------- .. automodule:: fiona.tool :members: :undoc-members: :show-inheritance: fiona.transform module ---------------------- .. automodule:: fiona.transform :members: :undoc-members: :show-inheritance: Module contents --------------- .. automodule:: fiona :members: :undoc-members: :show-inheritance: Fiona-1.7.10/docs/index.rst000066400000000000000000000004231317446052300154310ustar00rootroot00000000000000Fiona Documentation Contents ============================ .. 
toctree:: :maxdepth: 2 README User Manual API Documentation CLI Documentation Indices and tables ================== * :ref:`genindex` * :ref:`modindex` * :ref:`search` Fiona-1.7.10/docs/manual.rst000066400000000000000000001235461317446052300156130ustar00rootroot00000000000000===================== The Fiona User Manual ===================== :Author: Sean Gillies, :Version: |release| :Date: |today| :Copyright: This work is licensed under a `Creative Commons Attribution 3.0 United States License`__. .. __: http://creativecommons.org/licenses/by/3.0/us/ :Abstract: Fiona is OGR's neat, nimble, no-nonsense API. This document explains how to use the Fiona package for reading and writing geospatial data files. Python 3 is used in examples. See the `README `__ for installation and quick start instructions. .. sectnum:: .. _intro: Introduction ============ :dfn:`Geographic information systems` (GIS) help us plan, react to, and understand changes in our physical, political, economic, and cultural landscapes. A generation ago, GIS was something done only by major institutions like nations and cities, but it's become ubiquitous today thanks to accurate and inexpensive global positioning systems, commoditization of satellite imagery, and open source software. The kinds of data in GIS are roughly divided into :dfn:`rasters` representing continuous scalar fields (land surface temperature or elevation, for example) and :dfn:`vectors` representing discrete entities like roads and administrative boundaries. Fiona is concerned exclusively with the latter. It is a Python wrapper for vector data access functions from the `OGR `_ library. A very simple wrapper for minimalists. It reads data records from files as GeoJSON-like mappings and writes the same kind of mappings as records back to files. That's it. 
There are no layers, no cursors, no geometric operations, no transformations between coordinate systems, no remote method calls; all these concerns are left to other Python packages such as :py:mod:`Shapely ` and :py:mod:`pyproj ` and Python language protocols. Why? To eliminate unnecessary complication. Fiona aims to be simple to understand and use, with no gotchas. Please understand this: Fiona is designed to excel in a certain range of tasks and is less optimal in others. Fiona trades memory and speed for simplicity and reliability. Where OGR's Python bindings (for example) use C pointers, Fiona copies vector data from the data source to Python objects. These are simpler and safer to use, but more memory intensive. Fiona's performance is relatively more slow if you only need access to a single record field – and of course if you just want to reproject or filter data files, nothing beats the :command:`ogr2ogr` program – but Fiona's performance is much better than OGR's Python bindings if you want *all* fields and coordinates of a record. The copying is a constraint, but it simplifies programs. With Fiona, you don't have to track references to C objects to avoid crashes, and you can work with vector data using familiar Python mapping accessors. Less worry, less time spent reading API documentation. Rules of Thumb -------------- In what cases would you benefit from using Fiona? * If the features of interest are from or destined for a file in a non-text format like ESRI Shapefiles, Mapinfo TAB files, etc. * If you're more interested in the values of many feature properties than in a single property's value. * If you're more interested in all the coordinate values of a feature's geometry than in a single value. * If your processing system is distributed or not contained to a single process. In what cases would you not benefit from using Fiona? * If your data is in or destined for a JSON document you should use Python's :py:mod:`json` or :py:mod:`simplejson` modules. 
* If your data is in a RDBMS like PostGIS, use a Python DB package or ORM like :py:mod:`SQLAlchemy` or :py:mod:`GeoAlchemy`. Maybe you're using :py:mod:`GeoDjango` already. If so, carry on. * If your data is served via HTTP from CouchDB or CartoDB, etc, use an HTTP package (:py:mod:`httplib2`, :py:mod:`Requests`, etc) or the provider's Python API. * If you can use :command:`ogr2ogr`, do so. Example ------- The first example of using Fiona is this: copying records from one file to another, adding two attributes and making sure that all polygons are facing "up". Orientation of polygons is significant in some applications, extruded polygons in Google Earth for one. No other library (like :py:mod:`Shapely`) is needed here, which keeps it uncomplicated. There's a :file:`test_uk` file in the Fiona repository for use in this and other examples. .. sourcecode:: python import datetime import logging import sys import fiona logging.basicConfig(stream=sys.stderr, level=logging.INFO) def signed_area(coords): """Return the signed area enclosed by a ring using the linear time algorithm at http://www.cgafaq.info/wiki/Polygon_Area. A value >= 0 indicates a counter-clockwise oriented ring. """ xs, ys = map(list, zip(*coords)) xs.append(xs[1]) ys.append(ys[1]) return sum(xs[i]*(ys[i+1]-ys[i-1]) for i in range(1, len(coords)))/2.0 with fiona.open('docs/data/test_uk.shp', 'r') as source: # Copy the source schema and add two new properties. sink_schema = source.schema.copy() sink_schema['properties']['s_area'] = 'float' sink_schema['properties']['timestamp'] = 'datetime' # Create a sink for processed features with the same format and # coordinate reference system as the source. with fiona.open( 'oriented-ccw.shp', 'w', crs=source.crs, driver=source.driver, schema=sink_schema, ) as sink: for f in source: try: # If any feature's polygon is facing "down" (has rings # wound clockwise), its rings will be reordered to flip # it "up". 
g = f['geometry'] assert g['type'] == "Polygon" rings = g['coordinates'] sa = sum(signed_area(r) for r in rings) if sa < 0.0: rings = [r[::-1] for r in rings] g['coordinates'] = rings f['geometry'] = g # Add the signed area of the polygon and a timestamp # to the feature properties map. f['properties'].update( s_area=sa, timestamp=datetime.datetime.now().isoformat() ) sink.write(f) except Exception, e: logging.exception("Error processing feature %s:", f['id']) # The sink file is written to disk and closed when its block ends. Data Model ========== Discrete geographic features are usually represented in geographic information systems by :dfn:`records`. The characteristics of records and their semantic implications are well known [Kent1978]_. Among those most significant for geographic data: records have a single type, all records of that type have the same fields, and a record's fields concern a single geographic feature. Different systems model records in different ways, but the various models have enough in common that programmers have been able to create useful abstract data models. The `OGR model `__ is one. Its primary entities are :dfn:`Data Sources`, :dfn:`Layers`, and :dfn:`Features`. Features have not fields, but attributes and a :dfn:`Geometry`. An OGR Layer contains Features of a single type ("roads" or "wells", for example). The GeoJSON model is a bit more simple, keeping Features and substituting :dfn:`Feature Collections` for OGR Data Sources and Layers. The term "Feature" is thus overloaded in GIS modeling, denoting entities in both our conceptual and data models. Various formats for record files exist. The :dfn:`ESRI Shapefile` [ESRI1998]_ has been, at least in the United States, the most significant of these up to about 2005 and remains popular today. It is a binary format. The shape fields are stored in one .shp file and the other fields in another .dbf file. 
The GeoJSON [GeoJSON]_ format, from 2008, proposed a human readable text format in which geometry and other attribute fields are encoded together using :dfn:`Javascript Object Notation` [JSON]_. In GeoJSON, there's a uniformity of data access. Attributes of features are accessed in the same manner as attributes of a feature collection. Coordinates of a geometry are accessed in the same manner as features of a collection. The GeoJSON format turns out to be a good model for a Python API. JSON objects and Python dictionaries are semantically and syntactically similar. Replacing object-oriented Layer and Feature APIs with interfaces based on Python mappings provides a uniformity of access to data and reduces the amount of time spent reading documentation. A Python programmer knows how to use a mapping, so why not treat features as dictionaries? Use of existing Python idioms is one of Fiona's major design principles. .. admonition:: TL;DR Fiona subscribes to the conventional record model of data, but provides GeoJSON-like access to the data via Python file-like and mapping protocols. Reading Vector Data =================== Reading a GIS vector file begins by opening it in mode ``'r'`` using Fiona's :py:func:`~fiona.open` function. It returns an opened :py:class:`~fiona.collection.Collection` object. .. sourcecode:: pycon >>> import fiona >>> c = fiona.open('docs/data/test_uk.shp', 'r') >>> c >>> c.closed False .. admonition:: API Change :py:func:`fiona.collection` is deprecated, but aliased to :py:func:`fiona.open` in version 0.9. Mode ``'r'`` is the default and will be omitted in following examples. Fiona's :py:class:`~fiona.collection.Collection` is like a Python :py:class:`file`, but is iterable for records rather than lines. .. sourcecode:: pycon >>> next(c) {'geometry': {'type': 'Polygon', 'coordinates': ... >>> len(list(c)) 48 Note that :py:func:`list` iterates over the entire collection, effectively emptying it as with a Python :py:class:`file`. .. 
sourcecode:: pycon >>> next(c) Traceback (most recent call last): ... StopIteration >>> len(list(c)) 0 Seeking the beginning of the file is not supported. You must reopen the collection to get back to the beginning. .. sourcecode:: pycon >>> c = fiona.open('docs/data/test_uk.shp') >>> len(list(c)) 48 .. admonition:: File Encoding The format drivers will attempt to detect the encoding of your data, but may fail. In my experience GDAL 1.7.2 (for example) doesn't detect that the encoding of the Natural Earth dataset is Windows-1252. In this case, the proper encoding can be specified explicitly by using the ``encoding`` keyword parameter of :py:func:`fiona.open`: ``encoding='Windows-1252'``. New in version 0.9.1. Collection indexing ------------------- .. admonition:: New in version 1.1.6 Features of a collection may also be accessed by index. .. code-block:: pycon >>> import pprint >>> with fiona.open('docs/data/test_uk.shp') as src: ... pprint.pprint(src[1]) ... {'geometry': {'coordinates': [[(-4.663611, 51.158333), (-4.669168, 51.159439), (-4.673334, 51.161385), (-4.674445, 51.165276), (-4.67139, 51.185272), (-4.669445, 51.193054), (-4.665556, 51.195), (-4.65889, 51.195), (-4.656389, 51.192215), (-4.646389, 51.164444), (-4.646945, 51.160828), (-4.651668, 51.159439), (-4.663611, 51.158333)]], 'type': 'Polygon'}, 'id': '1', 'properties': OrderedDict([(u'CAT', 232.0), (u'FIPS_CNTRY', u'UK'), (u'CNTRY_NAME', u'United Kingdom'), (u'AREA', 244820.0), (u'POP_CNTRY', 60270708.0)]), 'type': 'Feature'} Closing Files ------------- A :py:class:`~fiona.collection.Collection` involves external resources. There's no guarantee that these will be released unless you explicitly :py:meth:`~fiona.collection.Collection.close` the object or use a :py:keyword:`with` statement. When a :py:class:`~fiona.collection.Collection` is a context guard, it is closed no matter what happens within the block. .. sourcecode:: pycon >>> try: ... with fiona.open('docs/data/test_uk.shp') as c: ... 
print(len(list(c))) ... assert True is False ... except: ... print(c.closed) ... raise ... 48 True Traceback (most recent call last): ... AssertionError An exception is raised in the :keyword:`with` block above, but as you can see from the print statement in the :keyword:`except` clause :py:meth:`c.__exit__` (and thereby :py:meth:`c.close`) has been called. .. important:: Always call :py:meth:`~fiona.collection.Collection.close` or use :keyword:`with` and you'll never stumble over tied-up external resources, locked files, etc. Format Drivers, CRS, Bounds, and Schema ======================================= In addition to attributes like those of :py:class:`file` (:py:attr:`~file.name`, :py:attr:`~file.mode`, :py:attr:`~file.closed`), a :py:class:`~fiona.collection.Collection` has a read-only :py:attr:`~fiona.collection.Collection.driver` attribute which names the :program:`OGR` :dfn:`format driver` used to open the vector file. .. sourcecode:: pycon >>> c = fiona.open('docs/data/test_uk.shp') >>> c.driver 'ESRI Shapefile' The :dfn:`coordinate reference system` (CRS) of the collection's vector data is accessed via a read-only :py:attr:`~fiona.collection.Collection.crs` attribute. .. sourcecode:: pycon >>> c.crs {'no_defs': True, 'ellps': 'WGS84', 'datum': 'WGS84', 'proj': 'longlat'} The CRS is represented by a mapping of :program:`PROJ.4` parameters. The :py:mod:`fiona.crs` module provides 3 functions to assist with these mappings. :py:func:`~fiona.crs.to_string` converts mappings to PROJ.4 strings: .. sourcecode:: pycon >>> from fiona.crs import to_string >>> print(to_string(c.crs)) +datum=WGS84 +ellps=WGS84 +no_defs +proj=longlat :py:func:`~fiona.crs.from_string` does the inverse. .. sourcecode:: pycon >>> from fiona.crs import from_string >>> from_string("+datum=WGS84 +ellps=WGS84 +no_defs +proj=longlat") {'no_defs': True, 'ellps': 'WGS84', 'datum': 'WGS84', 'proj': 'longlat'} :py:func:`~fiona.crs.from_epsg` is a shortcut to CRS mappings from EPSG codes. .. 
sourcecode:: pycon >>> from fiona.crs import from_epsg >>> from_epsg(3857) {'init': 'epsg:3857', 'no_defs': True} The number of records in the collection's file can be obtained via Python's built in :py:func:`len` function. .. sourcecode:: pycon >>> len(c) 48 The :dfn:`minimum bounding rectangle` (MBR) or :dfn:`bounds` of the collection's records is obtained via a read-only :py:attr:`~fiona.collection.Collection.bounds` attribute. .. sourcecode:: pycon >>> c.bounds (-8.621389, 49.911659, 1.749444, 60.844444) Finally, the schema of its record type (a vector file has a single type of record, remember) is accessed via a read-only :py:attr:`~fiona.collection.Collection.schema` attribute. It has 'geometry' and 'properties' items. The former is a string and the latter is an ordered dict with items having the same order as the fields in the data file. .. sourcecode:: pycon >>> import pprint >>> pprint.pprint(c.schema) {'geometry': 'Polygon', 'properties': {'CAT': 'float:16', 'FIPS_CNTRY': 'str', 'CNTRY_NAME': 'str', 'AREA': 'float:15.2', 'POP_CNTRY': 'float:15.2'}} Keeping Schemas Simple ---------------------- Fiona takes a less is more approach to record types and schemas. Data about record types is structured as closely to data about records as can be done. Modulo a record's 'id' key, the keys of a schema mapping are the same as the keys of the collection's record mappings. .. sourcecode:: pycon >>> rec = next(c) >>> set(rec.keys()) - set(c.schema.keys()) {'id'} >>> set(rec['properties'].keys()) == set(c.schema['properties'].keys()) True The values of the schema mapping are either additional mappings or field type names like 'Polygon', 'float', and 'str'. The corresponding Python types can be found in a dictionary named :py:attr:`fiona.FIELD_TYPES_MAP`. .. 
sourcecode:: pycon >>> pprint.pprint(fiona.FIELD_TYPES_MAP) {'date': , 'datetime': , 'float': , 'int': , 'str': , 'time': } Field Types ----------- In a nutshell, the types and their names are as near to what you'd expect in Python (or Javascript) as possible. The 'str' vs 'unicode' muddle is a fact of life in Python < 3.0. Fiona records have Unicode strings, but their field type name is 'str' (looking forward to Python 3). .. sourcecode:: pycon >>> type(rec['properties']['CNTRY_NAME']) >>> c.schema['properties']['CNTRY_NAME'] 'str' >>> fiona.FIELD_TYPES_MAP[c.schema['properties']['CNTRY_NAME']] String type fields may also indicate their maximum width. A value of 'str:25' indicates that all values will be no longer than 25 characters. If this value is used in the schema of a file opened for writing, values of that property will be truncated at 25 characters. The default width is 80 chars, which means 'str' and 'str:80' are more or less equivalent. Fiona provides a function to get the width of a property. .. sourcecode:: pycon >>> from fiona import prop_width >>> prop_width('str:25') 25 >>> prop_width('str') 80 Another function gets the proper Python type of a property. .. sourcecode:: pycon >>> from fiona import prop_type >>> prop_type('int') >>> prop_type('float') >>> prop_type('str:25') The example above is for Python 3. With Python 2, the type of 'str' properties is 'unicode'. .. sourcecode:: pycon >>> prop_type('str:25') Geometry Types -------------- Fiona supports the geometry types in GeoJSON and their 3D variants. This means that the value of a schema's geometry item will be one of the following: - Point - LineString - Polygon - MultiPoint - MultiLineString - MultiPolygon - GeometryCollection - 3D Point - 3D LineString - 3D Polygon - 3D MultiPoint - 3D MultiLineString - 3D MultiPolygon - 3D GeometryCollection The last seven of these, the 3D types, apply only to collection schema. The geometry types of features are always one of the first seven. 
A '3D Point' collection, for example, always has features with geometry type 'Point'. The coordinates of those geometries will be (x, y, z) tuples. Note that one of the most common vector data formats, Esri's Shapefile, has no 'MultiLineString' or 'MultiPolygon' schema geometries. However, a Shapefile that indicates 'Polygon' in its schema may yield either 'Polygon' or 'MultiPolygon' features. Records ======= A record you get from a collection is a Python :py:class:`dict` structured exactly like a GeoJSON Feature. Fiona records are self-describing; the names of its fields are contained within the data structure and the values in the fields are typed properly for the type of record. Numeric field values are instances of type :py:class:`int` and :py:class:`float`, for example, not strings. .. sourcecode:: pycon >>> pprint.pprint(rec) {'geometry': {'coordinates': [[(-4.663611, 51.158333), (-4.669168, 51.159439), (-4.673334, 51.161385), (-4.674445, 51.165276), (-4.67139, 51.185272), (-4.669445, 51.193054), (-4.665556, 51.195), (-4.65889, 51.195), (-4.656389, 51.192215), (-4.646389, 51.164444), (-4.646945, 51.160828), (-4.651668, 51.159439), (-4.663611, 51.158333)]], 'type': 'Polygon'}, 'id': '1', 'properties': {'CAT': 232.0, 'FIPS_CNTRY': 'UK', 'CNTRY_NAME': 'United Kingdom', 'AREA': 244820.0, 'POP_CNTRY': 60270708.0}} The record data has no references to the :py:class:`~fiona.collection.Collection` from which it originates or to any other external resource. It's entirely independent and safe to use in any way. Closing the collection does not affect the record at all. .. sourcecode:: pycon >>> c.close() >>> rec['id'] '1' Record Id --------- A record has an ``id`` key. As in the GeoJSON specification, its corresponding value is a string unique within the data file. .. sourcecode:: pycon >>> c = fiona.open('docs/data/test_uk.shp') >>> rec = next(c) >>> rec['id'] '0' .. admonition:: OGR Details In the :program:`OGR` model, feature ids are long integers. 
Fiona record ids are therefore usually string representations of integer record indexes. Record Properties ----------------- A record has a ``properties`` key. Its corresponding value is a mapping: an ordered dict to be precise. The keys of the properties mapping are the same as the keys of the properties mapping in the schema of the collection the record comes from (see above). .. sourcecode:: pycon >>> pprint.pprint(rec['properties']) {'CAT': 232.0, 'FIPS_CNTRY': 'UK', 'CNTRY_NAME': 'United Kingdom', 'AREA': 244820.0, 'POP_CNTRY': 60270708.0} Record Geometry --------------- A record has a ``geometry`` key. Its corresponding value is a mapping with ``type`` and ``coordinates`` keys. .. sourcecode:: pycon >>> pprint.pprint(rec['geometry']) {'coordinates': [[(0.899167, 51.357216), (0.885278, 51.35833), (0.7875, 51.369438), (0.781111, 51.370552), (0.766111, 51.375832), (0.759444, 51.380829), (0.745278, 51.39444), (0.740833, 51.400276), (0.735, 51.408333), (0.740556, 51.429718), (0.748889, 51.443604), (0.760278, 51.444717), (0.791111, 51.439995), (0.892222, 51.421387), (0.904167, 51.418884), (0.908889, 51.416939), (0.930555, 51.398888), (0.936667, 51.393608), (0.943889, 51.384995), (0.9475, 51.378609), (0.947778, 51.374718), (0.946944, 51.371109), (0.9425, 51.369164), (0.904722, 51.358055), (0.899167, 51.357216)]], 'type': 'Polygon'} Since the coordinates are just tuples, or lists of tuples, or lists of lists of tuples, the ``type`` tells you how to interpret them. 
+-------------------+---------------------------------------------------+ | Type | Coordinates | +===================+===================================================+ | Point | A single (x, y) tuple | +-------------------+---------------------------------------------------+ | LineString | A list of (x, y) tuple vertices | +-------------------+---------------------------------------------------+ | Polygon | A list of rings (each a list of (x, y) tuples) | +-------------------+---------------------------------------------------+ | MultiPoint | A list of points (each a single (x, y) tuple) | +-------------------+---------------------------------------------------+ | MultiLineString | A list of lines (each a list of (x, y) tuples) | +-------------------+---------------------------------------------------+ | MultiPolygon | A list of polygons (see above) | +-------------------+---------------------------------------------------+ Fiona, like the GeoJSON format, has both Northern Hemisphere "North is up" and Cartesian "X-Y" biases. The values within a tuple that denoted as ``(x, y)`` above are either (longitude E of the prime meridian, latitude N of the equator) or, for other projected coordinate systems, (easting, northing). .. admonition:: Long-Lat, not Lat-Long Even though most of us say "lat, long" out loud, Fiona's ``x,y`` is always easting, northing, which means ``(long, lat)``. Longitude first and latitude second, consistent with the GeoJSON format specification. Point Set Theory and Simple Features ------------------------------------ In a proper, well-scrubbed vector data file the geometry mappings explained above are representations of geometric objects made up of :dfn:`point sets`. The following .. sourcecode:: python {'type': 'LineString', 'coordinates': [(0.0, 0.0), (0.0, 1.0)]} represents not just two points, but the set of infinitely many points along the line of length 1.0 from ``(0.0, 0.0)`` to ``(0.0, 1.0)``. 
In the application of point set theory commonly called :dfn:`Simple Features Access` [SFA]_ two geometric objects are equal if their point sets are equal whether they are equal in the Python sense or not. If you have Shapely (which implements Simple Features Access) installed, you can see this in by verifying the following. .. sourcecode:: pycon >>> from shapely.geometry import shape >>> l1 = shape( ... {'type': 'LineString', 'coordinates': [(0, 0), (2, 2)]}) >>> l2 = shape( ... {'type': 'LineString', 'coordinates': [(0, 0), (1, 1), (2, 2)]}) >>> l1 == l2 False >>> l1.equals(l2) True .. admonition:: Dirty data Some files may contain vectors that are :dfn:`invalid` from a simple features standpoint due to accident (inadequate quality control on the producer's end) or intention ("dirty" vectors saved to a file for special treatment). Fiona doesn't sniff for or attempt to clean dirty data, so make sure you're getting yours from a clean source. Writing Vector Data =================== A vector file can be opened for writing in mode ``'a'`` (append) or mode ``'w'`` (write). .. admonition:: Note The in situ "update" mode of :program:`OGR` is quite format dependent and is therefore not supported by Fiona. Appending Data to Existing Files -------------------------------- Let's start with the simplest if not most common use case, adding new records to an existing file. The file is copied before modification and a suitable record extracted in the example below. .. sourcecode:: pycon >>> with fiona.open('docs/data/test_uk.shp') as c: ... rec = next(c) >>> rec['id'] = '-1' >>> rec['properties']['CNTRY_NAME'] = 'Gondor' >>> import os >>> os.system("cp docs/data/test_uk.* /tmp") 0 The coordinate reference system. format, and schema of the file are already defined, so it's opened with just two arguments as for reading, but in ``'a'`` mode. The new record is written to the end of the file using the :py:meth:`~fiona.collection.Collection.write` method. 
Accordingly, the length of the file grows from 48 to 49. .. sourcecode:: pycon >>> with fiona.open('/tmp/test_uk.shp', 'a') as c: ... print(len(c)) ... c.write(rec) ... print(len(c)) ... 48 49 The record you write must match the file's schema (because a file contains one type of record, remember). You'll get a :py:class:`ValueError` if it doesn't. .. sourcecode:: pycon >>> with fiona.open('/tmp/test_uk.shp', 'a') as c: ... c.write({'properties': {'foo': 'bar'}}) ... Traceback (most recent call last): ... ValueError: Record data not match collection schema Now, what about record ids? The id of a record written to a file is ignored and replaced by the next value appropriate for the file. If you read the file just appended to above, .. sourcecode:: pycon >>> with fiona.open('/tmp/test_uk.shp', 'a') as c: ... records = list(c) >>> records[-1]['id'] '48' >>> records[-1]['properties']['CNTRY_NAME'] 'Gondor' You'll see that the id of ``'-1'`` which the record had when written is replaced by ``'48'``. The :py:meth:`~fiona.collection.Collection.write` method writes a single record to the collection's file. Its sibling :py:meth:`~fiona.collection.Collection.writerecords` writes a sequence (or iterator) of records. .. sourcecode:: pycon >>> with fiona.open('/tmp/test_uk.shp', 'a') as c: ... c.writerecords([rec, rec, rec]) ... print(len(c)) ... 52 .. admonition:: Duplication Fiona's collections do not guard against duplication. The code above will write 3 duplicate records to the file, and they will be given unique sequential ids. .. admonition:: Buffering Fiona's output is buffered. The records passed to :py:meth:`write` and :py:meth:`writerecords` are flushed to disk when the collection is closed. You may also call :py:meth:`flush` periodically to write the buffer contents to disk. 
Writing New Files ----------------- Writing a new file is more complex than appending to an existing file because the file CRS, format, and schema have not yet been defined and must be done so by the programmer. Still, it's not very complicated. A schema is just a mapping, as described above. A CRS is also just a mapping, and the possible formats are enumerated in the :py:attr:`fiona.drivers` list. Copy the parameters of our demo file. .. sourcecode:: pycon >>> with fiona.open('docs/data/test_uk.shp') as source: ... source_driver = source.driver ... source_crs = source.crs ... source_schema = source.schema ... >>> source_driver 'ESRI Shapefile' >>> source_crs {'no_defs': True, 'ellps': 'WGS84', 'datum': 'WGS84', 'proj': 'longlat'} >>> pprint.pprint(source_schema) {'geometry': 'Polygon', 'properties': {'CAT': 'float:16', 'FIPS_CNTRY': 'str', 'CNTRY_NAME': 'str', 'AREA': 'float:15.2', 'POP_CNTRY': 'float:15.2'}} And now create a new file using them. .. sourcecode:: pycon >>> with fiona.open( ... '/tmp/foo.shp', ... 'w', ... driver=source_driver, ... crs=source_crs, ... schema=source_schema) as c: ... print(len(c)) ... c.write(rec) ... print(len(c)) ... 0 1 >>> c.closed True >>> len(c) 1 Because the properties of the source schema are ordered and are passed in the same order to the write-mode collection, the written file's fields have the same order as those of the source file. .. sourcecode:: console $ ogrinfo /tmp/foo.shp foo -so INFO: Open of `/tmp/foo.shp' using driver `ESRI Shapefile' successful. 
Layer name: foo Geometry: 3D Polygon Feature Count: 1 Extent: (0.735000, 51.357216) - (0.947778, 51.444717) Layer SRS WKT: GEOGCS["GCS_WGS_1984", DATUM["WGS_1984", SPHEROID["WGS_84",6378137,298.257223563]], PRIMEM["Greenwich",0], UNIT["Degree",0.017453292519943295]] CAT: Real (16.0) FIPS_CNTRY: String (80.0) CNTRY_NAME: String (80.0) AREA: Real (15.2) POP_CNTRY: Real (15.2) The :py:attr:`~fiona.collection.Collection.meta` attribute makes duplication of a file's meta properties even easier. .. sourcecode:: pycon >>> source = fiona.open('docs/data/test_uk.shp') >>> sink = fiona.open('/tmp/foo.shp', 'w', **source.meta) Ordering Record Fields ...................... Beginning with Fiona 1.0.1, the 'properties' item of :py:func:`fiona.open`'s 'schema' keyword argument may be an ordered dict or a list of (key, value) pairs, specifying an ordering that carries into written files. If an ordinary dict is given, the ordering is determined by the output of that dict's :py:func:`~items` method. For example, since .. sourcecode:: pycon >>> {'bar': 'int', 'foo': 'str'}.keys() ['foo', 'bar'] a schema of ``{'properties': {'bar': 'int', 'foo': 'str'}}`` will produce a shapefile where the first field is 'foo' and the second field is 'bar'. If you want 'bar' to be the first field, you must use a list of property items .. sourcecode:: python c = fiona.open( '/tmp/file.shp', 'w', schema={'properties': [('bar', 'int'), ('foo', 'str')], ...}, ... ) or an ordered dict. .. sourcecode:: python from collections import OrderedDict schema_props = OrderedDict([('bar', 'int'), ('foo', 'str')]) c = fiona.open( '/tmp/file.shp', 'w', schema={'properties': schema_props, ...}, ... ) Coordinates and Geometry Types ------------------------------ If you write 3D coordinates, ones having (x, y, z) tuples, to a 2D file ('Point' schema geometry, for example) the z values will be lost. 
If you write 2D coordinates, ones having only (x, y) tuples, to a 3D file ('3D Point' schema geometry, for example) a default z value of 0 will be provided. Advanced Topics =============== Slicing and masking iterators ----------------------------- With some vector data formats a spatial index accompanies the data file, allowing efficient bounding box searches. A collection's :py:meth:`~fiona.collection.Collection.items` method returns an iterator over pairs of FIDs and records that intersect a given ``(minx, miny, maxx, maxy)`` bounding box or geometry object. The collection's own coordinate reference system (see below) is used to interpret the box's values. If you want a list of the iterator's items, pass it to Python's builtin :py:func:`list` as shown below. .. sourcecode:: pycon >>> c = fiona.open('docs/data/test_uk.shp') >>> hits = list(c.items(bbox=(-5.0, 55.0, 0.0, 60.0))) >>> len(hits) 7 The iterator method takes the same ``stop`` or ``start, stop[, step]`` slicing arguments as :py:func:`itertools.islice`. To get just the first two items from that iterator, pass a stop index. .. sourcecode:: pycon >>> hits = c.items(2, bbox=(-5.0, 55.0, 0.0, 60.0)) >>> len(list(hits)) 2 To get the third through fifth items from that iterator, pass start and stop indexes. .. sourcecode:: pycon >>> hits = c.items(2, 5, bbox=(-5.0, 55.0, 0.0, 60.0)) >>> len(list(hits)) 3 To filter features by property values, use Python's builtin :py:func:`filter` and :py:keyword:`lambda` or your own filter function that takes a single feature record and returns ``True`` or ``False``. .. sourcecode:: pycon >>> def pass_positive_area(rec): ... return rec['properties'].get('AREA', 0.0) > 0.0 ... 
>>> c = fiona.open('docs/data/test_uk.shp') >>> hits = filter(pass_positive_area, c) >>> len(list(hits)) 48 Reading Multilayer data ----------------------- Up to this point, only simple datasets with one thematic layer or feature type per file have been shown and the venerable Esri Shapefile has been the primary example. Other GIS data formats can encode multiple layers or feature types within a single file or directory. Esri's `File Geodatabase `__ is one example of such a format. A more useful example, for the purpose of this manual, is a directory comprising multiple shapefiles. The following three shell commands will create just such a two layered data source from the test data distributed with Fiona. .. sourcecode:: console $ mkdir /tmp/data $ ogr2ogr /tmp/data/ docs/data/test_uk.shp test_uk -nln foo $ ogr2ogr /tmp/data/ docs/data/test_uk.shp test_uk -nln bar The layers of a data source can be listed using :py:func:`fiona.listlayers`. In the shapefile format case, layer names match base names of the files. .. sourcecode:: pycon >>> fiona.listlayers('/tmp/data') ['bar', 'foo'] Unlike OGR, Fiona has no classes representing layers or data sources. To access the features of a layer, open a collection using the path to the data source and specify the layer by name using the `layer` keyword. .. sourcecode:: pycon >>> import pprint >>> datasrc_path = '/tmp/data' >>> for name in fiona.listlayers(datasrc_path): ... with fiona.open(datasrc_path, layer=name) as c: ... pprint.pprint(c.schema) ... {'geometry': 'Polygon', 'properties': {'CAT': 'float:16', 'FIPS_CNTRY': 'str', 'CNTRY_NAME': 'str', 'AREA': 'float:15.2', 'POP_CNTRY': 'float:15.2'}} {'geometry': 'Polygon', 'properties': {'CAT': 'float:16', 'FIPS_CNTRY': 'str', 'CNTRY_NAME': 'str', 'AREA': 'float:15.2', 'POP_CNTRY': 'float:15.2'}} Layers may also be specified by their index. .. sourcecode:: pycon >>> for i, name in enumerate(fiona.listlayers(datasrc_path)): ... with fiona.open(datasrc_path, layer=i) as c: ... 
print(len(c)) ... 48 48 If no layer is specified, :py:func:`fiona.open` returns an open collection using the first layer. .. sourcecode:: pycon >>> with fiona.open(datasrc_path) as c: ... c.name == fiona.listlayers(datasrc_path)[0] ... True The most general way to open a shapefile for reading, using all of the parameters of :py:func:`fiona.open`, is to treat it as a data source with a named layer. .. sourcecode:: pycon >>> fiona.open('docs/data/test_uk.shp', 'r', layer='test_uk') In practice, it is fine to rely on the implicit first layer and default ``'r'`` mode and open a shapefile like this: .. sourcecode:: pycon >>> fiona.open('docs/data/test_uk.shp') Writing Multilayer data ----------------------- To write an entirely new layer to a multilayer data source, simply provide a unique name to the `layer` keyword argument. .. sourcecode:: pycon >>> 'wah' not in fiona.listlayers(datasrc_path) True >>> with fiona.open(datasrc_path, layer='bar') as c: ... with fiona.open(datasrc_path, 'w', layer='wah', **c.meta) as d: ... d.write(next(c)) ... >>> fiona.listlayers(datasrc_path) ['bar', 'foo', 'wah'] In ``'w'`` mode, existing layers will be overwritten if specified, just as normal files are overwritten by Python's :py:func:`open` function. .. sourcecode:: pycon >>> 'wah' in fiona.listlayers(datasrc_path) True >>> with fiona.open(datasrc_path, layer='bar') as c: ... with fiona.open(datasrc_path, 'w', layer='wah', **c.meta) as d: ... # Overwrites the existing layer named 'wah'! Virtual filesystems ------------------- Zip and Tar archives can be treated as virtual filesystems and collections can be made from paths and layers within them. In other words, Fiona lets you read zipped shapefiles. For example, make a Zip archive from the shapefile distributed with Fiona. .. 
sourcecode:: console $ zip /tmp/zed.zip docs/data/test_uk.* adding: docs/data/test_uk.shp (deflated 48%) adding: docs/data/test_uk.shx (deflated 37%) adding: docs/data/test_uk.dbf (deflated 98%) adding: docs/data/test_uk.prj (deflated 15%) The `vfs` keyword parameter for :py:func:`fiona.listlayers` and :py:func:`fiona.open` may be an Apache Commons VFS style string beginning with "zip://" or "tar://" and followed by an absolute or relative path to the archive file. When this parameter is used, the first argument to must be an absolute path within that archive. The layers in that Zip archive are: .. sourcecode:: pycon >>> import fiona >>> fiona.listlayers('/docs/data', vfs='zip:///tmp/zed.zip') ['test_uk'] The single shapefile may also be accessed like so: .. sourcecode:: pycon >>> with fiona.open( ... '/docs/data/test_uk.shp', ... vfs='zip:///tmp/zed.zip') as c: ... print(len(c)) ... 48 Dumpgj ====== Fiona installs a script named ``dumpgj``. It converts files to GeoJSON with JSON-LD context as an option and is intended to be an upgrade to "ogr2ogr -f GeoJSON". .. 
sourcecode:: console $ dumpgj --help usage: dumpgj [-h] [-d] [-n N] [--compact] [--encoding ENC] [--record-buffered] [--ignore-errors] [--use-ld-context] [--add-ld-context-item TERM=URI] infile [outfile] Serialize a file's records or description to GeoJSON positional arguments: infile input file name outfile output file name, defaults to stdout if omitted optional arguments: -h, --help show this help message and exit -d, --description serialize file's data description (schema) only -n N, --indent N indentation level in N number of chars --compact use compact separators (',', ':') --encoding ENC Specify encoding of the input file --record-buffered Economical buffering of writes at record, not collection (default), level --ignore-errors log errors but do not stop serialization --use-ld-context add a JSON-LD context to JSON output --add-ld-context-item TERM=URI map a term to a URI and add it to the output's JSON LD context Final Notes =========== This manual is a work in progress and will grow and improve with Fiona. Questions and suggestions are very welcome. Please feel free to use the `issue tracker `__ or email the author directly. Do see the `README `__ for installation instructions and information about supported versions of Python and other software dependencies. Fiona would not be possible without the `contributions of other developers `__, especially Frank Warmerdam and Even Rouault, the developers of GDAL/OGR; and Mike Weisman, who saved Fiona from neglect and obscurity. References ========== .. [Kent1978] William Kent, Data and Reality, North Holland, 1978. .. [ESRI1998] ESRI Shapefile Technical Description. July 1998. http://www.esri.com/library/whitepapers/pdfs/shapefile.pdf .. [GeoJSON] http://geojson.org .. [JSON] http://www.ietf.org/rfc/rfc4627 .. [SFA] http://en.wikipedia.org/wiki/Simple_feature_access Fiona-1.7.10/docs/modules.rst000066400000000000000000000000641317446052300157730ustar00rootroot00000000000000fiona ===== .. 
toctree:: :maxdepth: 4 fiona Fiona-1.7.10/examples/000077500000000000000000000000001317446052300144575ustar00rootroot00000000000000Fiona-1.7.10/examples/open.py000066400000000000000000000051111317446052300157700ustar00rootroot00000000000000 import fiona # This module contains examples of opening files to get feature collections in # different ways. # # It is meant to be run from the distribution root, the directory containing # setup.py. # # A ``path`` is always the ``open()`` function's first argument. It can be # absolute or relative to the working directory. It is the only positional # argument, though it is conventional to use the mode as a 2nd positional # argument. # 1. Opening a file with a single data layer (shapefiles, etc). # # args: path, mode # kwds: none # # The relative path to a file on the filesystem is given and its single layer # is selected implicitly (a shapefile has a single layer). The file is opened # for reading (mode 'r'), but since this is the default, we'll omit it in # following examples. with fiona.open('docs/data/test_uk.shp', 'r') as c: assert len(c) == 48 # 2. Opening a file with explicit layer selection (FileGDB, etc). # # args: path # kwds: layer # # Same as above but layer specified explicitly by name.. with fiona.open('docs/data/test_uk.shp', layer='test_uk') as c: assert len(c) == 48 # 3. Opening a directory for access to a single file. # # args: path # kwds: layer # # Same as above but using the path to the directory containing the shapefile, # specified explicitly by name. with fiona.open('docs/data', layer='test_uk') as c: assert len(c) == 48 # 4. Opening a single file within a zip archive. # # args: path # kwds: vfs # # Open a file given its absolute path within a virtual filesystem. The VFS # is given an Apache Commons VFS identifier. It may contain either an absolute # path or a path relative to the working directory. 
# # Example archive: # # $ unzip -l docs/data/test_uk.zip # Archive: docs/data/test_uk.zip # Length Date Time Name # -------- ---- ---- ---- # 10129 04-08-13 20:49 test_uk.dbf # 143 04-08-13 20:49 test_uk.prj # 65156 04-08-13 20:49 test_uk.shp # 484 04-08-13 20:49 test_uk.shx # -------- ------- # 75912 4 files with fiona.open('/test_uk.shp', vfs='zip://docs/data/test_uk.zip') as c: assert len(c) == 48 # 5. Opening a directory within a zip archive to select a layer. # # args: path # kwds: layer, vfs # # The most complicated case. As above, but specifying the root directory within # the virtual filesystem as the path and the layer by name (combination of # 4 and 3). It ought to be possible to open a file geodatabase within a zip # file like this. with fiona.open('/', layer='test_uk', vfs='zip://docs/data/test_uk.zip') as c: assert len(c) == 48 Fiona-1.7.10/examples/orient-ccw.py000066400000000000000000000040421317446052300171030ustar00rootroot00000000000000# An example of flipping feature polygons right side up. import datetime import logging import sys import fiona logging.basicConfig(stream=sys.stderr, level=logging.INFO) def signed_area(coords): """Return the signed area enclosed by a ring using the linear time algorithm at http://www.cgafaq.info/wiki/Polygon_Area. A value >= 0 indicates a counter-clockwise oriented ring. """ xs, ys = map(list, zip(*coords)) xs.append(xs[1]) ys.append(ys[1]) return sum(xs[i]*(ys[i+1]-ys[i-1]) for i in range(1, len(coords)))/2.0 with fiona.open('docs/data/test_uk.shp', 'r') as source: # Copy the source schema and add two new properties. schema = source.schema.copy() schema['properties']['s_area'] = 'float' schema['properties']['timestamp'] = 'str' # Create a sink for processed features with the same format and # coordinate reference system as the source. 
with fiona.open( 'oriented-ccw.shp', 'w', driver=source.driver, schema=schema, crs=source.crs ) as sink: for f in source: try: # If any feature's polygon is facing "down" (has rings # wound clockwise), its rings will be reordered to flip # it "up". g = f['geometry'] assert g['type'] == 'Polygon' rings = g['coordinates'] sa = sum(signed_area(r) for r in rings) if sa < 0.0: rings = [r[::-1] for r in rings] g['coordinates'] = rings f['geometry'] = g # Add the signed area of the polygon and a timestamp # to the feature properties map. f['properties'].update( s_area=sa, timestamp=datetime.datetime.now().isoformat() ) sink.write(f) except Exception, e: logging.exception("Error processing feature %s:", f['id']) Fiona-1.7.10/examples/with-descartes-functional.py000066400000000000000000000011571317446052300221230ustar00rootroot00000000000000# Making maps with reduce() from matplotlib import pyplot from descartes import PolygonPatch import fiona BLUE = '#6699cc' def render(fig, rec): """Given matplotlib axes and a record, adds the record as a patch and returns the axes so that reduce() can accumulate more patches.""" fig.gca().add_patch( PolygonPatch(rec['geometry'], fc=BLUE, ec=BLUE, alpha=0.5, zorder=2)) return fig with fiona.open('docs/data/test_uk.shp', 'r') as source: fig = reduce(render, source, pyplot.figure(figsize=(8, 8))) fig.gca().autoscale(tight=False) fig.savefig('with-descartes-functional.png') Fiona-1.7.10/examples/with-descartes.py000066400000000000000000000013331317446052300177570ustar00rootroot00000000000000 import subprocess from matplotlib import pyplot from descartes import PolygonPatch import fiona # Set up the figure and axes. BLUE = '#6699cc' fig = pyplot.figure(1, figsize=(6, 6), dpi=90) ax = fig.add_subplot(111) with fiona.drivers(): # For each feature in the collection, add a patch to the axes. 
with fiona.open('docs/data/test_uk.shp', 'r') as input: for f in input: ax.add_patch( PolygonPatch( f['geometry'], fc=BLUE, ec=BLUE, alpha=0.5, zorder=2 )) # Should be able to get extents from the collection in a future version # of Fiona. ax.set_xlim(-9.25, 2.75) ax.set_ylim(49.5, 61.5) fig.savefig('test_uk.png') subprocess.call(['open', 'test_uk.png']) Fiona-1.7.10/examples/with-pyproj.py000066400000000000000000000022111317446052300173210ustar00rootroot00000000000000 import logging import sys from pyproj import Proj, transform import fiona from fiona.crs import from_epsg logging.basicConfig(stream=sys.stderr, level=logging.INFO) with fiona.open('docs/data/test_uk.shp', 'r') as source: sink_schema = source.schema.copy() p_in = Proj(source.crs) with fiona.open( 'with-pyproj.shp', 'w', crs=from_epsg(27700), driver=source.driver, schema=sink_schema, ) as sink: p_out = Proj(sink.crs) for f in source: try: assert f['geometry']['type'] == "Polygon" new_coords = [] for ring in f['geometry']['coordinates']: x2, y2 = transform(p_in, p_out, *zip(*ring)) new_coords.append(zip(x2, y2)) f['geometry']['coordinates'] = new_coords sink.write(f) except Exception, e: # Writing uncleanable features to a different shapefile # is another option. logging.exception("Error transforming feature %s:", f['id']) Fiona-1.7.10/examples/with-shapely.py000066400000000000000000000020331317446052300174450ustar00rootroot00000000000000 import logging import sys from shapely.geometry import mapping, shape import fiona logging.basicConfig(stream=sys.stderr, level=logging.INFO) with fiona.open('docs/data/test_uk.shp', 'r') as source: # **source.meta is a shortcut to get the crs, driver, and schema # keyword arguments from the source Collection. 
with fiona.open( 'with-shapely.shp', 'w', **source.meta) as sink: for f in source: try: geom = shape(f['geometry']) if not geom.is_valid: clean = geom.buffer(0.0) assert clean.is_valid assert clean.geom_type == 'Polygon' geom = clean f['geometry'] = mapping(geom) sink.write(f) except Exception, e: # Writing uncleanable features to a different shapefile # is another option. logging.exception("Error cleaning feature %s:", f['id']) Fiona-1.7.10/fiona/000077500000000000000000000000001317446052300137355ustar00rootroot00000000000000Fiona-1.7.10/fiona/__init__.py000066400000000000000000000235501317446052300160530ustar00rootroot00000000000000# -*- coding: utf-8 -*- """ Fiona is OGR's neat, nimble, no-nonsense API. Fiona provides a minimal, uncomplicated Python interface to the open source GIS community's most trusted geodata access library and integrates readily with other Python GIS packages such as pyproj, Rtree and Shapely. How minimal? Fiona can read features as mappings from shapefiles or other GIS vector formats and write mappings as features to files using the same formats. That's all. There aren't any feature or geometry classes. Features and their geometries are just data. A Fiona feature is a Python mapping inspired by the GeoJSON format. It has `id`, 'geometry`, and `properties` keys. The value of `id` is a string identifier unique within the feature's parent collection. The `geometry` is another mapping with `type` and `coordinates` keys. The `properties` of a feature is another mapping corresponding to its attribute table. For example: {'id': '1', 'geometry': {'type': 'Point', 'coordinates': (0.0, 0.0)}, 'properties': {'label': u'Null Island'} } is a Fiona feature with a point geometry and one property. Features are read and written using objects returned by the ``collection`` function. These ``Collection`` objects are a lot like Python ``file`` objects. A ``Collection`` opened in reading mode serves as an iterator over features. 
One opened in a writing mode provides a ``write`` method. Usage ----- Here's an example of reading a select few polygon features from a shapefile and for each, picking off the first vertex of the exterior ring of the polygon and using that as the point geometry for a new feature writing to a "points.shp" file. >>> import fiona >>> with fiona.open('docs/data/test_uk.shp', 'r') as inp: ... output_schema = inp.schema.copy() ... output_schema['geometry'] = 'Point' ... with collection( ... "points.shp", "w", ... crs=inp.crs, ... driver="ESRI Shapefile", ... schema=output_schema ... ) as out: ... for f in inp.filter( ... bbox=(-5.0, 55.0, 0.0, 60.0) ... ): ... value = f['geometry']['coordinates'][0][0] ... f['geometry'] = { ... 'type': 'Point', 'coordinates': value} ... out.write(f) Because Fiona collections are context managers, they are closed and (in writing modes) flush contents to disk when their ``with`` blocks end. """ import logging import os from six import string_types from fiona.collection import Collection, BytesCollection, vsi_path from fiona._drivers import driver_count, GDALEnv from fiona.drvsupport import supported_drivers from fiona.compat import OrderedDict from fiona.ogrext import _bounds, _listlayers, FIELD_TYPES_MAP, _remove from fiona.ogrext import ( calc_gdal_version_num, get_gdal_version_num, get_gdal_release_name) # These modules are imported by fiona.ogrext, but are also import here to # help tools like cx_Freeze find them automatically from fiona import _geometry, _err, rfc3339 import uuid __all__ = ['bounds', 'listlayers', 'open', 'prop_type', 'prop_width'] __version__ = "1.7.10" __gdal_version__ = get_gdal_release_name().decode('utf-8') log = logging.getLogger(__name__) def open( path, mode='r', driver=None, schema=None, crs=None, encoding=None, layer=None, vfs=None, enabled_drivers=None, crs_wkt=None): """Open file at ``path`` in ``mode`` "r" (read), "a" (append), or "w" (write) and return a ``Collection`` object. 
In write mode, a driver name such as "ESRI Shapefile" or "GPX" (see OGR docs or ``ogr2ogr --help`` on the command line) and a schema mapping such as: {'geometry': 'Point', 'properties': [('class', 'int'), ('label', 'str'), ('value', 'float')]} must be provided. If a particular ordering of properties ("fields" in GIS parlance) in the written file is desired, a list of (key, value) pairs as above or an ordered dict is required. If no ordering is needed, a standard dict will suffice. A coordinate reference system for collections in write mode can be defined by the ``crs`` parameter. It takes Proj4 style mappings like {'proj': 'longlat', 'ellps': 'WGS84', 'datum': 'WGS84', 'no_defs': True} short hand strings like EPSG:4326 or WKT representations of coordinate reference systems. The drivers used by Fiona will try to detect the encoding of data files. If they fail, you may provide the proper ``encoding``, such as 'Windows-1252' for the Natural Earth datasets. When the provided path is to a file containing multiple named layers of data, a layer can be singled out by ``layer``. A virtual filesystem can be specified. The ``vfs`` parameter may be an Apache Commons VFS style string beginning with "zip://" or "tar://"". In this case, the ``path`` must be an absolute path within that container. The drivers enabled for opening datasets may be restricted to those listed in the ``enabled_drivers`` parameter. This and the ``driver`` parameter afford much control over opening of files. # Trying only the GeoJSON driver when opening to read, the # following raises ``DataIOError``: fiona.open('example.shp', driver='GeoJSON') # Trying first the GeoJSON driver, then the Shapefile driver, # the following succeeds: fiona.open( 'example.shp', enabled_drivers=['GeoJSON', 'ESRI Shapefile']) """ # Parse the vfs into a vsi and an archive path. 
path, vsi, archive = parse_paths(path, vfs) if mode in ('a', 'r'): if archive: if not os.path.exists(archive): raise IOError("no such archive file: %r" % archive) elif path != '-' and not os.path.exists(path): raise IOError("no such file or directory: %r" % path) c = Collection(path, mode, driver=driver, encoding=encoding, layer=layer, vsi=vsi, archive=archive, enabled_drivers=enabled_drivers) elif mode == 'w': if schema: # Make an ordered dict of schema properties. this_schema = schema.copy() this_schema['properties'] = OrderedDict(schema['properties']) else: this_schema = None c = Collection(path, mode, crs=crs, driver=driver, schema=this_schema, encoding=encoding, layer=layer, vsi=vsi, archive=archive, enabled_drivers=enabled_drivers, crs_wkt=crs_wkt) else: raise ValueError( "mode string must be one of 'r', 'w', or 'a', not %s" % mode) return c collection = open def remove(path_or_collection, driver=None): """Deletes an OGR data source The required ``path`` argument may be an absolute or relative file path. Alternatively, a Collection can be passed instead in which case the path and driver are automatically determined. Otherwise the ``driver`` argument must be specified. Raises a ``RuntimeError`` if the data source cannot be deleted. Example usage: fiona.remove('test.shp', 'ESRI Shapefile') """ if isinstance(path_or_collection, Collection): collection = path_or_collection path = collection.path driver = collection.driver collection.close() else: path = path_or_collection if driver is None: raise ValueError("The driver argument is required when removing a path") _remove(path, driver) def listlayers(path, vfs=None): """Returns a list of layer names in their index order. The required ``path`` argument may be an absolute or relative file or directory path. A virtual filesystem can be specified. The ``vfs`` parameter may be an Apache Commons VFS style string beginning with "zip://" or "tar://"". 
In this case, the ``path`` must be an absolute path within that container. """ if not isinstance(path, string_types): raise TypeError("invalid path: %r" % path) if vfs and not isinstance(vfs, string_types): raise TypeError("invalid vfs: %r" % vfs) path, vsi, archive = parse_paths(path, vfs) if archive: if not os.path.exists(archive): raise IOError("no such archive file: %r" % archive) elif not os.path.exists(path): raise IOError("no such file or directory: %r" % path) with drivers(): return _listlayers(vsi_path(path, vsi, archive)) def parse_paths(path, vfs=None): archive = vsi = None if vfs: parts = vfs.split("://") vsi = parts.pop(0) if parts else None archive = parts.pop(0) if parts else None else: parts = path.split("://") path = parts.pop() if parts else None vsi = parts.pop() if parts else None return path, vsi, archive def prop_width(val): """Returns the width of a str type property. Undefined for non-str properties. Example: >>> prop_width('str:25') 25 >>> prop_width('str') 80 """ if val.startswith('str'): return int((val.split(":")[1:] or ["80"])[0]) return None def prop_type(text): """Returns a schema property's proper Python type. Example: >>> prop_type('int') >>> prop_type('str:25') """ key = text.split(':')[0] return FIELD_TYPES_MAP[key] def drivers(*args, **kwargs): """Returns a context manager with registered drivers.""" if driver_count == 0: log.debug("Creating a chief GDALEnv in drivers()") return GDALEnv(**kwargs) else: log.debug("Creating a not-responsible GDALEnv in drivers()") return GDALEnv(**kwargs) def bounds(ob): """Returns a (minx, miny, maxx, maxy) bounding box. The ``ob`` may be a feature record or geometry.""" geom = ob.get('geometry') or ob return _bounds(geom) Fiona-1.7.10/fiona/_cpl.pxd000066400000000000000000000013351317446052300153710ustar00rootroot00000000000000# Cross-platform API functions. 
cdef extern from "cpl_conv.h": void * CPLMalloc (size_t) void CPLFree (void *ptr) void CPLSetThreadLocalConfigOption (char *key, char *val) const char *CPLGetConfigOption (char *, char *) cdef extern from "cpl_vsi.h": ctypedef struct VSILFILE: pass int VSIFCloseL (VSILFILE *) VSILFILE * VSIFileFromMemBuffer (const char * filename, unsigned char * data, int data_len, int take_ownership) int VSIUnlink (const char * pathname) ctypedef int OGRErr ctypedef struct OGREnvelope: double MinX double MaxX double MinY double MaxY Fiona-1.7.10/fiona/_crs.pxd000066400000000000000000000022421317446052300154000ustar00rootroot00000000000000# Coordinate system and transform API functions. cdef extern from "ogr_srs_api.h": ctypedef void * OGRSpatialReferenceH void OSRCleanup () OGRSpatialReferenceH OSRClone (OGRSpatialReferenceH srs) int OSRExportToProj4 (OGRSpatialReferenceH srs, char **params) int OSRExportToWkt (OGRSpatialReferenceH srs, char **params) int OSRImportFromEPSG (OGRSpatialReferenceH srs, int code) int OSRImportFromProj4 (OGRSpatialReferenceH srs, char *proj) int OSRSetFromUserInput (OGRSpatialReferenceH srs, char *input) int OSRAutoIdentifyEPSG (OGRSpatialReferenceH srs) int OSRFixup(OGRSpatialReferenceH srs) const char * OSRGetAuthorityName (OGRSpatialReferenceH srs, const char *key) const char * OSRGetAuthorityCode (OGRSpatialReferenceH srs, const char *key) OGRSpatialReferenceH OSRNewSpatialReference (char *wkt) void OSRRelease (OGRSpatialReferenceH srs) void * OCTNewCoordinateTransformation (OGRSpatialReferenceH source, OGRSpatialReferenceH dest) void OCTDestroyCoordinateTransformation (void *source) int OCTTransform (void *ct, int nCount, double *x, double *y, double *z) Fiona-1.7.10/fiona/_crs.pyx000066400000000000000000000036221317446052300154300ustar00rootroot00000000000000"""Extension module supporting crs.py. Calls methods from GDAL's OSR module. 
""" from __future__ import absolute_import import logging from six import string_types from fiona cimport _cpl from fiona.errors import CRSError logger = logging.getLogger(__name__) # Export a WKT string from input crs. def crs_to_wkt(crs): """Convert a Fiona CRS object to WKT format""" cdef void *cogr_srs = NULL cdef char *proj_c = NULL cogr_srs = OSRNewSpatialReference(NULL) if cogr_srs == NULL: raise CRSError("NULL spatial reference") # First, check for CRS strings like "EPSG:3857". if isinstance(crs, string_types): proj_b = crs.encode('utf-8') proj_c = proj_b OSRSetFromUserInput(cogr_srs, proj_c) elif isinstance(crs, dict): # EPSG is a special case. init = crs.get('init') if init: logger.debug("Init: %s", init) auth, val = init.split(':') if auth.upper() == 'EPSG': logger.debug("Setting EPSG: %s", val) OSRImportFromEPSG(cogr_srs, int(val)) else: params = [] crs['wktext'] = True for k, v in crs.items(): if v is True or (k in ('no_defs', 'wktext') and v): params.append("+%s" % k) else: params.append("+%s=%s" % (k, v)) proj = " ".join(params) logger.debug("PROJ.4 to be imported: %r", proj) proj_b = proj.encode('utf-8') proj_c = proj_b OSRImportFromProj4(cogr_srs, proj_c) else: raise ValueError("Invalid CRS") # Fixup, export to WKT, and set the GDAL dataset's projection. OSRFixup(cogr_srs) OSRExportToWkt(cogr_srs, &proj_c) if proj_c == NULL: raise CRSError("Null projection") proj_b = proj_c _cpl.CPLFree(proj_c) return proj_b.decode('utf-8') Fiona-1.7.10/fiona/_csl.pxd000066400000000000000000000002421317446052300153700ustar00rootroot00000000000000# String API functions. cdef extern from "cpl_string.h": char ** CSLSetNameValue (char **list, char *name, char *value) void CSLDestroy (char **list) Fiona-1.7.10/fiona/_drivers.pyx000066400000000000000000000123401317446052300163140ustar00rootroot00000000000000# The GDAL and OGR driver registry. # GDAL driver management. 
from __future__ import absolute_import import os import os.path import logging import sys from six import string_types cdef extern from "cpl_conv.h": void CPLFree (void *ptr) void CPLSetThreadLocalConfigOption (char *key, char *val) const char * CPLGetConfigOption ( const char *key, const char *default) cdef extern from "cpl_error.h": void CPLSetErrorHandler (void *handler) cdef extern from "gdal.h": void GDALAllRegister() void GDALDestroyDriverManager() int GDALGetDriverCount() void * GDALGetDriver(int i) const char * GDALGetDriverShortName(void *driver) const char * GDALGetDriverLongName(void *driver) cdef extern from "ogr_api.h": void OGRRegisterDriver(void *driver) void OGRDeregisterDriver(void *driver) void OGRRegisterAll() void OGRCleanupAll() int OGRGetDriverCount() void * OGRGetDriver(int i) void * OGRGetDriverByName(const char *name) const char * OGR_Dr_GetName(void *driver) log = logging.getLogger('Fiona') class NullHandler(logging.Handler): def emit(self, record): pass log.addHandler(NullHandler()) level_map = { 0: 0, 1: logging.DEBUG, 2: logging.WARNING, 3: logging.ERROR, 4: logging.CRITICAL } code_map = { 0: 'CPLE_None', 1: 'CPLE_AppDefined', 2: 'CPLE_OutOfMemory', 3: 'CPLE_FileIO', 4: 'CPLE_OpenFailed', 5: 'CPLE_IllegalArg', 6: 'CPLE_NotSupported', 7: 'CPLE_AssertionFailed', 8: 'CPLE_NoWriteAccess', 9: 'CPLE_UserInterrupt', 10: 'CPLE_ObjectNull' } IF UNAME_SYSNAME == "Windows": cdef void * __stdcall errorHandler(int eErrClass, int err_no, char *msg) with gil: log.log(level_map[eErrClass], "%s in %s", code_map[err_no], msg) ELSE: cdef void * errorHandler(int eErrClass, int err_no, char *msg) with gil: log.log(level_map[eErrClass], "%s in %s", code_map[err_no], msg) def driver_count(): return OGRGetDriverCount() cdef class GDALEnv(object): cdef public object options def __init__(self, **options): self.options = options.copy() def __enter__(self): self.start() return self def __exit__(self, exc_type=None, exc_val=None, exc_tb=None): self.stop() def 
start(self): cdef const char *key_c = NULL cdef const char *val_c = NULL if GDALGetDriverCount() == 0: GDALAllRegister() if OGRGetDriverCount() == 0: OGRRegisterAll() CPLSetErrorHandler(errorHandler) if OGRGetDriverCount() == 0: raise ValueError("Drivers not registered") if 'GDAL_DATA' in os.environ: log.debug("GDAL_DATA: %s", os.environ['GDAL_DATA']) else: whl_datadir = os.path.abspath( os.path.join(os.path.dirname(__file__), "gdal_data")) share_datadir = os.path.join(sys.prefix, 'share/gdal') if os.path.exists(os.path.join(whl_datadir, 'pcs.csv')): os.environ['GDAL_DATA'] = whl_datadir log.debug("Set GDAL_DATA = %r", whl_datadir) elif os.path.exists(os.path.join(share_datadir, 'pcs.csv')): os.environ['GDAL_DATA'] = share_datadir log.debug("Set GDAL_DATA = %r", share_datadir) else: log.warning("GDAL data files not located, GDAL_DATA not set") if 'PROJ_LIB' in os.environ: log.debug("PROJ_LIB: %s", os.environ['PROJ_LIB']) else: whl_datadir = os.path.abspath( os.path.join(os.path.dirname(__file__), "proj_data")) share_datadir = os.path.join(sys.prefix, 'share/proj') if os.path.exists(whl_datadir): os.environ['PROJ_LIB'] = whl_datadir log.debug("Set PROJ_LIB = %r", whl_datadir) elif os.path.exists(share_datadir): os.environ['PROJ_LIB'] = share_datadir log.debug("Set PROJ_LIB = %r", share_datadir) else: log.warning("PROJ data files not located, PROJ_LIB not set") for key, val in self.options.items(): key_b = key.upper().encode('utf-8') key_c = key_b if isinstance(val, string_types): val_b = val.encode('utf-8') else: val_b = ('ON' if val else 'OFF').encode('utf-8') val_c = val_b CPLSetThreadLocalConfigOption(key_c, val_c) log.debug("Option %s=%s", key, CPLGetConfigOption(key_c, NULL)) return self def stop(self): cdef const char *key_c = NULL for key in self.options: key_b = key.upper().encode('utf-8') key_c = key_b CPLSetThreadLocalConfigOption(key_c, NULL) CPLSetErrorHandler(NULL) def drivers(self): cdef void *drv = NULL cdef const char *key = NULL cdef const char *val 
= NULL cdef int i result = {} for i in range(OGRGetDriverCount()): drv = OGRGetDriver(i) key = OGR_Dr_GetName(drv) key_b = key val = OGR_Dr_GetName(drv) val_b = val result[key_b.decode('utf-8')] = val_b.decode('utf-8') return result Fiona-1.7.10/fiona/_err.pxd000066400000000000000000000000631317446052300154000ustar00rootroot00000000000000cdef void *exc_wrap_pointer(void *ptr) except NULL Fiona-1.7.10/fiona/_err.pyx000066400000000000000000000131151317446052300154270ustar00rootroot00000000000000"""fiona._err Transformation of GDAL C API errors to Python exceptions using Python's ``with`` statement and an error-handling context manager class. The ``cpl_errs`` error-handling context manager is intended for use in Rasterio's Cython code. When entering the body of a ``with`` statement, the context manager clears GDAL's error stack. On exit, the context manager pops the last error off the stack and raises an appropriate Python exception. It's otherwise pretty difficult to do this kind of thing. I couldn't make it work with a CPL error handler, Cython's C code swallows exceptions raised from C callbacks. When used to wrap a call to open a PNG in update mode with cpl_errs: cdef void *hds = GDALOpen('file.png', 1) if hds == NULL: raise ValueError("NULL dataset") the ValueError of last resort never gets raised because the context manager raises a more useful and informative error: Traceback (most recent call last): File "/Users/sean/code/rasterio/scripts/rio_insp", line 65, in with rasterio.open(args.src, args.mode) as src: File "/Users/sean/code/rasterio/rasterio/__init__.py", line 111, in open s.start() ValueError: The PNG driver does not support update access to existing datasets. """ from __future__ import absolute_import # CPL function declarations. 
cdef extern from "cpl_error.h": ctypedef enum CPLErr: CE_None CE_Debug CE_Warning CE_Failure CE_Fatal int CPLGetLastErrorNo() const char* CPLGetLastErrorMsg() int CPLGetLastErrorType() void CPLErrorReset() from enum import IntEnum # Python exceptions expressing the CPL error numbers. class CPLE_BaseError(Exception): """Base CPL error class Exceptions deriving from this class are intended for use only in Rasterio's Cython code. Let's not expose API users to them. """ def __init__(self, error, errno, errmsg): self.error = error self.errno = errno self.errmsg = errmsg def __str__(self): return self.__unicode__() def __unicode__(self): return "{}".format(self.errmsg) @property def args(self): return self.error, self.errno, self.errmsg class CPLE_AppDefinedError(CPLE_BaseError): pass class CPLE_OutOfMemoryError(CPLE_BaseError): pass class CPLE_FileIOError(CPLE_BaseError): pass class CPLE_OpenFailedError(CPLE_BaseError): pass class CPLE_IllegalArgError(CPLE_BaseError): pass class CPLE_NotSupportedError(CPLE_BaseError): pass class CPLE_AssertionFailedError(CPLE_BaseError): pass class CPLE_NoWriteAccessError(CPLE_BaseError): pass class CPLE_UserInterruptError(CPLE_BaseError): pass class ObjectNullError(CPLE_BaseError): pass class CPLE_HttpResponseError(CPLE_BaseError): pass class CPLE_AWSBucketNotFoundError(CPLE_BaseError): pass class CPLE_AWSObjectNotFoundError(CPLE_BaseError): pass class CPLE_AWSAccessDeniedError(CPLE_BaseError): pass class CPLE_AWSInvalidCredentialsError(CPLE_BaseError): pass class CPLE_AWSSignatureDoesNotMatchError(CPLE_BaseError): pass # Map of GDAL error numbers to the Python exceptions. exception_map = { 1: CPLE_AppDefinedError, 2: CPLE_OutOfMemoryError, 3: CPLE_FileIOError, 4: CPLE_OpenFailedError, 5: CPLE_IllegalArgError, 6: CPLE_NotSupportedError, 7: CPLE_AssertionFailedError, 8: CPLE_NoWriteAccessError, 9: CPLE_UserInterruptError, 10: ObjectNullError, # error numbers 11-16 are introduced in GDAL 2.1. See # https://github.com/OSGeo/gdal/pull/98. 
11: CPLE_HttpResponseError, 12: CPLE_AWSBucketNotFoundError, 13: CPLE_AWSObjectNotFoundError, 14: CPLE_AWSAccessDeniedError, 15: CPLE_AWSInvalidCredentialsError, 16: CPLE_AWSSignatureDoesNotMatchError} # CPL Error types as an enum. class GDALError(IntEnum): none = CE_None debug = CE_Debug warning = CE_Warning failure = CE_Failure fatal = CE_Fatal cdef class GDALErrCtxManager: """A manager for GDAL error handling contexts.""" def __enter__(self): CPLErrorReset() return self def __exit__(self, exc_type=None, exc_val=None, exc_tb=None): cdef int err_type = CPLGetLastErrorType() cdef int err_no = CPLGetLastErrorNo() cdef const char *msg = CPLGetLastErrorMsg() # TODO: warn for err_type 2? if err_type >= 2: raise exception_map[err_no](err_type, err_no, msg) cdef inline object exc_check(): """Checks GDAL error stack for fatal or non-fatal errors Returns ------- An Exception, SystemExit, or None """ cdef const char *msg_c = NULL err_type = CPLGetLastErrorType() err_no = CPLGetLastErrorNo() err_msg = CPLGetLastErrorMsg() if err_msg == NULL: msg = "No error message." else: # Reformat messages. msg_b = err_msg msg = msg_b.decode('utf-8') msg = msg.replace("`", "'") msg = msg.replace("\n", " ") if err_type == 3: CPLErrorReset() return exception_map.get( err_no, CPLE_BaseError)(err_type, err_no, msg) if err_type == 4: return SystemExit("Fatal error: {0}".format((err_type, err_no, msg))) else: return cdef void *exc_wrap_pointer(void *ptr) except NULL: """Wrap a GDAL/OGR function that returns GDALDatasetH etc (void *) Raises a Rasterio exception if a non-fatal error has be set. """ if ptr == NULL: exc = exc_check() if exc: raise exc return NULL return ptr cpl_errs = GDALErrCtxManager() Fiona-1.7.10/fiona/_geometry.pxd000066400000000000000000000052241317446052300164470ustar00rootroot00000000000000# Geometry API functions. 
ctypedef int OGRErr ctypedef struct OGREnvelope: double MinX double MaxX double MinY double MaxY cdef extern from "ogr_api.h": OGRErr OGR_G_AddGeometryDirectly (void *geometry, void *part) void OGR_G_AddPoint (void *geometry, double x, double y, double z) void OGR_G_AddPoint_2D (void *geometry, double x, double y) void OGR_G_CloseRings (void *geometry) void * OGR_G_CreateGeometry (int wkbtypecode) void OGR_G_DestroyGeometry (void *geometry) unsigned char * OGR_G_ExportToJson (void *geometry) void OGR_G_ExportToWkb (void *geometry, int endianness, char *buffer) int OGR_G_GetCoordinateDimension (void *geometry) int OGR_G_GetGeometryCount (void *geometry) unsigned char * OGR_G_GetGeometryName (void *geometry) int OGR_G_GetGeometryType (void *geometry) void * OGR_G_GetGeometryRef (void *geometry, int n) int OGR_G_GetPointCount (void *geometry) double OGR_G_GetX (void *geometry, int n) double OGR_G_GetY (void *geometry, int n) double OGR_G_GetZ (void *geometry, int n) void OGR_G_ImportFromWkb (void *geometry, unsigned char *bytes, int nbytes) int OGR_G_WkbSize (void *geometry) cdef class GeomBuilder: cdef void *geom cdef object code cdef object geomtypename cdef object ndims cdef _buildCoords(self, void *geom) cpdef _buildPoint(self) cpdef _buildLineString(self) cpdef _buildLinearRing(self) cdef _buildParts(self, void *geom) cpdef _buildPolygon(self) cpdef _buildMultiPoint(self) cpdef _buildMultiLineString(self) cpdef _buildMultiPolygon(self) cpdef _buildGeometryCollection(self) cdef build(self, void *geom) cpdef build_wkb(self, object wkb) cdef class OGRGeomBuilder: cdef void * _createOgrGeometry(self, int geom_type) except NULL cdef _addPointToGeometry(self, void *cogr_geometry, object coordinate) cdef void * _buildPoint(self, object coordinates) except NULL cdef void * _buildLineString(self, object coordinates) except NULL cdef void * _buildLinearRing(self, object coordinates) except NULL cdef void * _buildPolygon(self, object coordinates) except NULL cdef void * 
_buildMultiPoint(self, object coordinates) except NULL cdef void * _buildMultiLineString(self, object coordinates) except NULL cdef void * _buildMultiPolygon(self, object coordinates) except NULL cdef void * _buildGeometryCollection(self, object coordinates) except NULL cdef void * build(self, object geom) except NULL cdef unsigned int geometry_type_code(object name) cdef object normalize_geometry_type_code(unsigned int code) Fiona-1.7.10/fiona/_geometry.pyx000066400000000000000000000267041317446052300165020ustar00rootroot00000000000000# Coordinate and geometry transformations. from __future__ import absolute_import import logging from fiona.errors import UnsupportedGeometryTypeError class NullHandler(logging.Handler): def emit(self, record): pass log = logging.getLogger("Fiona") log.addHandler(NullHandler()) # Mapping of OGR integer geometry types to GeoJSON type names. GEOMETRY_TYPES = { 0: 'Unknown', 1: 'Point', 2: 'LineString', 3: 'Polygon', 4: 'MultiPoint', 5: 'MultiLineString', 6: 'MultiPolygon', 7: 'GeometryCollection', # Unsupported types. 
#8: 'CircularString', #9: 'CompoundCurve', #10: 'CurvePolygon', #11: 'MultiCurve', #12: 'MultiSurface', #13: 'Curve', #14: 'Surface', #15: 'PolyhedralSurface', #16: 'TIN', #17: 'Triangle', 100: 'None', 101: 'LinearRing', 0x80000001: '3D Point', 0x80000002: '3D LineString', 0x80000003: '3D Polygon', 0x80000004: '3D MultiPoint', 0x80000005: '3D MultiLineString', 0x80000006: '3D MultiPolygon', 0x80000007: '3D GeometryCollection' } # mapping of GeoJSON type names to OGR integer geometry types GEOJSON2OGR_GEOMETRY_TYPES = dict((v, k) for k, v in GEOMETRY_TYPES.iteritems()) cdef unsigned int geometry_type_code(name): """Map OGC geometry type names to integer codes.""" offset = 0 if name.endswith('ZM'): offset = 3000 elif name.endswith('M'): offset = 2000 elif name.endswith('Z'): offset = 1000 normalized_name = name.rstrip('ZM') if normalized_name not in GEOJSON2OGR_GEOMETRY_TYPES: raise UnsupportedGeometryTypeError(name) return offset + GEOJSON2OGR_GEOMETRY_TYPES[normalized_name] cdef object normalize_geometry_type_code(unsigned int code): """Normalize geometry type codes.""" # Remove 2.5D flag. norm_code = code & (~0x80000000) # Normalize Z, M, and ZM types. Fiona 1.x does not support M # and doesn't treat OGC 'Z' variants as special types of their # own. norm_code = norm_code % 1000 if norm_code not in GEOMETRY_TYPES: raise UnsupportedGeometryTypeError(norm_code) return norm_code # Geometry related functions and classes follow. 
cdef void * _createOgrGeomFromWKB(object wkb) except NULL: """Make an OGR geometry from a WKB string""" wkbtype = bytearray(wkb)[1] cdef unsigned char *buffer = wkb cdef void *cogr_geometry = OGR_G_CreateGeometry(wkbtype) if cogr_geometry is not NULL: OGR_G_ImportFromWkb(cogr_geometry, buffer, len(wkb)) return cogr_geometry cdef _deleteOgrGeom(void *cogr_geometry): """Delete an OGR geometry""" if cogr_geometry is not NULL: OGR_G_DestroyGeometry(cogr_geometry) cogr_geometry = NULL cdef class GeomBuilder: """Builds Fiona (GeoJSON) geometries from an OGR geometry handle. """ cdef _buildCoords(self, void *geom): # Build a coordinate sequence cdef int i if geom == NULL: raise ValueError("Null geom") npoints = OGR_G_GetPointCount(geom) coords = [] for i in range(npoints): values = [OGR_G_GetX(geom, i), OGR_G_GetY(geom, i)] if self.ndims > 2: values.append(OGR_G_GetZ(geom, i)) coords.append(tuple(values)) return coords cpdef _buildPoint(self): return {'type': 'Point', 'coordinates': self._buildCoords(self.geom)[0]} cpdef _buildLineString(self): return {'type': 'LineString', 'coordinates': self._buildCoords(self.geom)} cpdef _buildLinearRing(self): return {'type': 'LinearRing', 'coordinates': self._buildCoords(self.geom)} cdef _buildParts(self, void *geom): cdef int j cdef void *part if geom == NULL: raise ValueError("Null geom") parts = [] for j in range(OGR_G_GetGeometryCount(geom)): part = OGR_G_GetGeometryRef(geom, j) parts.append(GeomBuilder().build(part)) return parts cpdef _buildPolygon(self): coordinates = [p['coordinates'] for p in self._buildParts(self.geom)] return {'type': 'Polygon', 'coordinates': coordinates} cpdef _buildMultiPoint(self): coordinates = [p['coordinates'] for p in self._buildParts(self.geom)] return {'type': 'MultiPoint', 'coordinates': coordinates} cpdef _buildMultiLineString(self): coordinates = [p['coordinates'] for p in self._buildParts(self.geom)] return {'type': 'MultiLineString', 'coordinates': coordinates} cpdef 
_buildMultiPolygon(self): coordinates = [p['coordinates'] for p in self._buildParts(self.geom)] return {'type': 'MultiPolygon', 'coordinates': coordinates} cpdef _buildGeometryCollection(self): parts = self._buildParts(self.geom) return {'type': 'GeometryCollection', 'geometries': parts} cdef build(self, void *geom): # The only method anyone needs to call if geom == NULL: raise ValueError("Null geom") cdef unsigned int etype = OGR_G_GetGeometryType(geom) # Remove 2.5D flag. self.code = etype & (~0x80000000) # Normalize Z, M, and ZM types. Fiona 1.x does not support M # and doesn't treat OGC 'Z' variants as special types of their # own. self.code = self.code % 1000 if self.code not in GEOMETRY_TYPES: raise UnsupportedGeometryTypeError(self.code) self.geomtypename = GEOMETRY_TYPES[self.code] self.ndims = OGR_G_GetCoordinateDimension(geom) self.geom = geom return getattr(self, '_build' + self.geomtypename)() cpdef build_wkb(self, object wkb): # The only other method anyone needs to call cdef object data = wkb cdef void *cogr_geometry = _createOgrGeomFromWKB(data) result = self.build(cogr_geometry) _deleteOgrGeom(cogr_geometry) return result cdef class OGRGeomBuilder: """Builds OGR geometries from Fiona geometries. 
""" cdef void * _createOgrGeometry(self, int geom_type) except NULL: cdef void *cogr_geometry = OGR_G_CreateGeometry(geom_type) if cogr_geometry == NULL: raise Exception("Could not create OGR Geometry of type: %i" % geom_type) return cogr_geometry cdef _addPointToGeometry(self, void *cogr_geometry, object coordinate): if len(coordinate) == 2: x, y = coordinate OGR_G_AddPoint_2D(cogr_geometry, x, y) else: x, y, z = coordinate[:3] OGR_G_AddPoint(cogr_geometry, x, y, z) cdef void * _buildPoint(self, object coordinates) except NULL: cdef void *cogr_geometry = self._createOgrGeometry(GEOJSON2OGR_GEOMETRY_TYPES['Point']) self._addPointToGeometry(cogr_geometry, coordinates) return cogr_geometry cdef void * _buildLineString(self, object coordinates) except NULL: cdef void *cogr_geometry = self._createOgrGeometry(GEOJSON2OGR_GEOMETRY_TYPES['LineString']) for coordinate in coordinates: log.debug("Adding point %s", coordinate) self._addPointToGeometry(cogr_geometry, coordinate) return cogr_geometry cdef void * _buildLinearRing(self, object coordinates) except NULL: cdef void *cogr_geometry = self._createOgrGeometry(GEOJSON2OGR_GEOMETRY_TYPES['LinearRing']) for coordinate in coordinates: log.debug("Adding point %s", coordinate) self._addPointToGeometry(cogr_geometry, coordinate) log.debug("Closing ring") OGR_G_CloseRings(cogr_geometry) return cogr_geometry cdef void * _buildPolygon(self, object coordinates) except NULL: cdef void *cogr_ring cdef void *cogr_geometry = self._createOgrGeometry(GEOJSON2OGR_GEOMETRY_TYPES['Polygon']) for ring in coordinates: log.debug("Adding ring %s", ring) cogr_ring = self._buildLinearRing(ring) log.debug("Built ring") OGR_G_AddGeometryDirectly(cogr_geometry, cogr_ring) log.debug("Added ring %s", ring) return cogr_geometry cdef void * _buildMultiPoint(self, object coordinates) except NULL: cdef void *cogr_part cdef void *cogr_geometry = self._createOgrGeometry(GEOJSON2OGR_GEOMETRY_TYPES['MultiPoint']) for coordinate in coordinates: 
log.debug("Adding point %s", coordinate) cogr_part = self._buildPoint(coordinate) OGR_G_AddGeometryDirectly(cogr_geometry, cogr_part) log.debug("Added point %s", coordinate) return cogr_geometry cdef void * _buildMultiLineString(self, object coordinates) except NULL: cdef void *cogr_part cdef void *cogr_geometry = self._createOgrGeometry(GEOJSON2OGR_GEOMETRY_TYPES['MultiLineString']) for line in coordinates: log.debug("Adding line %s", line) cogr_part = self._buildLineString(line) log.debug("Built line") OGR_G_AddGeometryDirectly(cogr_geometry, cogr_part) log.debug("Added line %s", line) return cogr_geometry cdef void * _buildMultiPolygon(self, object coordinates) except NULL: cdef void *cogr_part cdef void *cogr_geometry = self._createOgrGeometry(GEOJSON2OGR_GEOMETRY_TYPES['MultiPolygon']) for part in coordinates: log.debug("Adding polygon %s", part) cogr_part = self._buildPolygon(part) log.debug("Built polygon") OGR_G_AddGeometryDirectly(cogr_geometry, cogr_part) log.debug("Added polygon %s", part) return cogr_geometry cdef void * _buildGeometryCollection(self, object coordinates) except NULL: cdef void *cogr_part cdef void *cogr_geometry = self._createOgrGeometry(GEOJSON2OGR_GEOMETRY_TYPES['GeometryCollection']) for part in coordinates: log.debug("Adding part %s", part) cogr_part = OGRGeomBuilder().build(part) log.debug("Built part") OGR_G_AddGeometryDirectly(cogr_geometry, cogr_part) log.debug("Added part %s", part) return cogr_geometry cdef void * build(self, object geometry) except NULL: cdef object typename = geometry['type'] cdef object coordinates = geometry.get('coordinates') if typename == 'Point': return self._buildPoint(coordinates) elif typename == 'LineString': return self._buildLineString(coordinates) elif typename == 'LinearRing': return self._buildLinearRing(coordinates) elif typename == 'Polygon': return self._buildPolygon(coordinates) elif typename == 'MultiPoint': return self._buildMultiPoint(coordinates) elif typename == 'MultiLineString': 
return self._buildMultiLineString(coordinates) elif typename == 'MultiPolygon': return self._buildMultiPolygon(coordinates) elif typename == 'GeometryCollection': coordinates = geometry.get('geometries') return self._buildGeometryCollection(coordinates) else: raise ValueError("Unsupported geometry type %s" % typename) cdef geometry(void *geom): """Factory for Fiona geometries""" return GeomBuilder().build(geom) def geometryRT(geometry): # For testing purposes only, leaks the JSON data cdef void *cogr_geometry = OGRGeomBuilder().build(geometry) result = GeomBuilder().build(cogr_geometry) _deleteOgrGeom(cogr_geometry) return result Fiona-1.7.10/fiona/_transform.pyx000066400000000000000000000131521317446052300166530ustar00rootroot00000000000000# distutils: language = c++ # # Coordinate and geometry transformations. from __future__ import absolute_import import logging from fiona cimport _cpl, _crs, _csl, _geometry from fiona._crs cimport OGRSpatialReferenceH cdef extern from "ogr_geometry.h" nogil: cdef cppclass OGRGeometry: pass cdef cppclass OGRGeometryFactory: void * transformWithOptions(void *geom, void *ct, char **options) cdef extern from "ogr_spatialref.h": cdef cppclass OGRCoordinateTransformation: pass log = logging.getLogger("Fiona") class NullHandler(logging.Handler): def emit(self, record): pass log.addHandler(NullHandler()) cdef void *_crs_from_crs(object crs): cdef char *proj_c = NULL cdef OGRSpatialReferenceH osr = NULL osr = _crs.OSRNewSpatialReference(NULL) if osr == NULL: raise ValueError("NULL spatial reference") params = [] # Normally, we expect a CRS dict. if isinstance(crs, dict): # EPSG is a special case. 
init = crs.get('init') if init: auth, val = init.split(':') if auth.upper() == 'EPSG': _crs.OSRImportFromEPSG(osr, int(val)) else: crs['wktext'] = True for k, v in crs.items(): if v is True or (k in ('no_defs', 'wktext') and v): params.append("+%s" % k) else: params.append("+%s=%s" % (k, v)) proj = " ".join(params) log.debug("PROJ.4 to be imported: %r", proj) proj_b = proj.encode('utf-8') proj_c = proj_b _crs.OSRImportFromProj4(osr, proj_c) # Fall back for CRS strings like "EPSG:3857." else: proj_b = crs.encode('utf-8') proj_c = proj_b _crs.OSRSetFromUserInput(osr, proj_c) return osr def _transform(src_crs, dst_crs, xs, ys): cdef double *x cdef double *y cdef char *proj_c = NULL cdef OGRSpatialReferenceH src = NULL cdef OGRSpatialReferenceH dst = NULL cdef void *transform = NULL cdef int i assert len(xs) == len(ys) src = _crs_from_crs(src_crs) dst = _crs_from_crs(dst_crs) n = len(xs) x = _cpl.CPLMalloc(n*sizeof(double)) y = _cpl.CPLMalloc(n*sizeof(double)) for i in range(n): x[i] = xs[i] y[i] = ys[i] transform = _crs.OCTNewCoordinateTransformation(src, dst) res = _crs.OCTTransform(transform, n, x, y, NULL) res_xs = [0]*n res_ys = [0]*n for i in range(n): res_xs[i] = x[i] res_ys[i] = y[i] _cpl.CPLFree(x) _cpl.CPLFree(y) _crs.OCTDestroyCoordinateTransformation(transform) _crs.OSRRelease(src) _crs.OSRRelease(dst) return res_xs, res_ys def _transform_geom( src_crs, dst_crs, geom, antimeridian_cutting, antimeridian_offset, precision): """Return a transformed geometry.""" cdef char *proj_c = NULL cdef char *key_c = NULL cdef char *val_c = NULL cdef char **options = NULL cdef OGRSpatialReferenceH src = NULL cdef OGRSpatialReferenceH dst = NULL cdef void *transform = NULL cdef OGRGeometryFactory *factory = NULL cdef void *src_ogr_geom = NULL cdef void *dst_ogr_geom = NULL cdef int i if src_crs and dst_crs: src = _crs_from_crs(src_crs) dst = _crs_from_crs(dst_crs) transform = _crs.OCTNewCoordinateTransformation(src, dst) # Transform options. 
options = _csl.CSLSetNameValue( options, "DATELINEOFFSET", str(antimeridian_offset).encode('utf-8')) if antimeridian_cutting: options = _csl.CSLSetNameValue(options, "WRAPDATELINE", "YES") factory = new OGRGeometryFactory() src_ogr_geom = _geometry.OGRGeomBuilder().build(geom) dst_ogr_geom = factory.transformWithOptions( src_ogr_geom, transform, options) g = _geometry.GeomBuilder().build(dst_ogr_geom) _geometry.OGR_G_DestroyGeometry(dst_ogr_geom) _geometry.OGR_G_DestroyGeometry(src_ogr_geom) _crs.OCTDestroyCoordinateTransformation(transform) if options != NULL: _csl.CSLDestroy(options) _crs.OSRRelease(src) _crs.OSRRelease(dst) else: g = geom if precision >= 0: if g['type'] == 'Point': x, y = g['coordinates'] x = round(x, precision) y = round(y, precision) new_coords = [x, y] elif g['type'] in ['LineString', 'MultiPoint']: xp, yp = zip(*g['coordinates']) xp = [round(v, precision) for v in xp] yp = [round(v, precision) for v in yp] new_coords = list(zip(xp, yp)) elif g['type'] in ['Polygon', 'MultiLineString']: new_coords = [] for piece in g['coordinates']: xp, yp = zip(*piece) xp = [round(v, precision) for v in xp] yp = [round(v, precision) for v in yp] new_coords.append(list(zip(xp, yp))) elif g['type'] == 'MultiPolygon': parts = g['coordinates'] new_coords = [] for part in parts: inner_coords = [] for ring in part: xp, yp = zip(*ring) xp = [round(v, precision) for v in xp] yp = [round(v, precision) for v in yp] inner_coords.append(list(zip(xp, yp))) new_coords.append(inner_coords) g['coordinates'] = new_coords return g Fiona-1.7.10/fiona/collection.py000066400000000000000000000432301317446052300164440ustar00rootroot00000000000000# -*- coding: utf-8 -*- # Collections provide file-like access to feature data import logging import os import warnings from fiona import compat from fiona.ogrext import Iterator, ItemsIterator, KeysIterator from fiona.ogrext import Session, WritingSession from fiona.ogrext import ( calc_gdal_version_num, get_gdal_version_num, 
get_gdal_release_name) from fiona.ogrext import buffer_to_virtual_file, remove_virtual_file from fiona.errors import DriverError, SchemaError, CRSError from fiona._drivers import driver_count, GDALEnv from fiona.drvsupport import supported_drivers from six import string_types, binary_type log = logging.getLogger(__name__) class Collection(object): """A file-like interface to features of a vector dataset Python text file objects are iterators over lines of a file. Fiona Collections are similar iterators (not lists!) over features represented as GeoJSON-like mappings. """ def __init__(self, path, mode='r', driver=None, schema=None, crs=None, encoding=None, layer=None, vsi=None, archive=None, enabled_drivers=None, crs_wkt=None, **kwargs): """The required ``path`` is the absolute or relative path to a file, such as '/data/test_uk.shp'. In ``mode`` 'r', data can be read only. In ``mode`` 'a', data can be appended to a file. In ``mode`` 'w', data overwrites the existing contents of a file. In ``mode`` 'w', an OGR ``driver`` name and a ``schema`` are required. A Proj4 ``crs`` string is recommended. If both ``crs`` and ``crs_wkt`` keyword arguments are passed, the latter will trump the former. In 'w' mode, kwargs will be mapped to OGR layer creation options. 
""" if not isinstance(path, string_types): raise TypeError("invalid path: %r" % path) if not isinstance(mode, string_types) or mode not in ('r', 'w', 'a'): raise TypeError("invalid mode: %r" % mode) if driver and not isinstance(driver, string_types): raise TypeError("invalid driver: %r" % driver) if schema and not hasattr(schema, 'get'): raise TypeError("invalid schema: %r" % schema) if crs and not isinstance(crs, compat.DICT_TYPES + string_types): raise TypeError("invalid crs: %r" % crs) if crs_wkt and not isinstance(crs_wkt, string_types): raise TypeError("invalid crs_wkt: %r" % crs_wkt) if encoding and not isinstance(encoding, string_types): raise TypeError("invalid encoding: %r" % encoding) if layer and not isinstance(layer, tuple(list(string_types) + [int])): raise TypeError("invalid name: %r" % layer) if vsi: if not isinstance(vsi, string_types) or vsi not in ('zip', 'tar', 'gzip'): raise TypeError("invalid vsi: %r" % vsi) if archive and not isinstance(archive, string_types): raise TypeError("invalid archive: %r" % archive) # Check GDAL version against drivers if (driver == "GPKG" and get_gdal_version_num() < calc_gdal_version_num(1, 11, 0)): raise DriverError( "GPKG driver requires GDAL 1.11.0, fiona was compiled " "against: {}".format(get_gdal_release_name())) self.session = None self.iterator = None self._len = 0 self._bounds = None self._driver = None self._schema = None self._crs = None self._crs_wkt = None self.env = None self.enabled_drivers = enabled_drivers self.path = vsi_path(path, vsi, archive) if mode == 'w': if layer and not isinstance(layer, string_types): raise ValueError("in 'r' mode, layer names must be strings") if driver == 'GeoJSON': if layer is not None: raise ValueError("the GeoJSON format does not have layers") self.name = 'OgrGeoJSON' # TODO: raise ValueError as above for other single-layer formats. 
else: self.name = layer or os.path.basename(os.path.splitext(path)[0]) else: if layer in (0, None): self.name = 0 else: self.name = layer or os.path.basename(os.path.splitext(path)[0]) self.mode = mode if self.mode == 'w': if driver == 'Shapefile': driver = 'ESRI Shapefile' if not driver: raise DriverError("no driver") elif driver not in supported_drivers: raise DriverError( "unsupported driver: %r" % driver) elif self.mode not in supported_drivers[driver]: raise DriverError( "unsupported mode: %r" % self.mode) self._driver = driver if not schema: raise SchemaError("no schema") elif 'properties' not in schema: raise SchemaError("schema lacks: properties") elif 'geometry' not in schema: raise SchemaError("schema lacks: geometry") self._schema = schema if crs_wkt: self._crs_wkt = crs_wkt elif crs: if 'init' in crs or 'proj' in crs or 'epsg' in crs.lower(): self._crs = crs else: raise CRSError("crs lacks init or proj parameter") if driver_count == 0: # create a local manager and enter self.env = GDALEnv() else: self.env = GDALEnv() self.env.__enter__() self._driver = driver self.encoding = encoding try: if self.mode == 'r': self.session = Session() self.session.start(self) elif self.mode in ('a', 'w'): self.session = WritingSession() self.session.start(self, **kwargs) except IOError: self.session = None raise if self.session is not None: self.guard_driver_mode() if not self.encoding: self.encoding = self.session.get_fileencoding().lower() def __repr__(self): return "<%s Collection '%s', mode '%s' at %s>" % ( self.closed and "closed" or "open", self.path + ":" + str(self.name), self.mode, hex(id(self))) def guard_driver_mode(self): driver = self.session.get_driver() if driver not in supported_drivers: raise DriverError("unsupported driver: %r" % driver) if self.mode not in supported_drivers[driver]: raise DriverError("unsupported mode: %r" % self.mode) @property def driver(self): """Returns the name of the proper OGR driver.""" if not self._driver and self.mode in 
("a", "r") and self.session: self._driver = self.session.get_driver() return self._driver @property def schema(self): """Returns a mapping describing the data schema. The mapping has 'geometry' and 'properties' items. The former is a string such as 'Point' and the latter is an ordered mapping that follows the order of fields in the data file. """ if not self._schema and self.mode in ("a", "r") and self.session: self._schema = self.session.get_schema() return self._schema @property def crs(self): """Returns a Proj4 string.""" if self._crs is None and self.session: self._crs = self.session.get_crs() return self._crs @property def crs_wkt(self): """Returns a WKT string.""" if self._crs_wkt is None and self.session: self._crs_wkt = self.session.get_crs_wkt() return self._crs_wkt @property def meta(self): """Returns a mapping with the driver, schema, crs, and additional properties.""" return { 'driver': self.driver, 'schema': self.schema, 'crs': self.crs, 'crs_wkt': self.crs_wkt} profile = meta def filter(self, *args, **kwds): """Returns an iterator over records, but filtered by a test for spatial intersection with the provided ``bbox``, a (minx, miny, maxx, maxy) tuple or a geometry ``mask``. Positional arguments ``stop`` or ``start, stop[, step]`` allows iteration to skip over items or stop at a specific item. 
""" if self.closed: raise ValueError("I/O operation on closed collection") elif self.mode != 'r': raise IOError("collection not open for reading") if args: s = slice(*args) start = s.start stop = s.stop step = s.step else: start = stop = step = None bbox = kwds.get('bbox') mask = kwds.get('mask') if bbox and mask: raise ValueError("mask and bbox can not be set together") self.iterator = Iterator( self, start, stop, step, bbox, mask) return self.iterator def items(self, *args, **kwds): """Returns an iterator over FID, record pairs, optionally filtered by a test for spatial intersection with the provided ``bbox``, a (minx, miny, maxx, maxy) tuple or a geometry ``mask``. Positional arguments ``stop`` or ``start, stop[, step]`` allows iteration to skip over items or stop at a specific item. """ if self.closed: raise ValueError("I/O operation on closed collection") elif self.mode != 'r': raise IOError("collection not open for reading") if args: s = slice(*args) start = s.start stop = s.stop step = s.step else: start = stop = step = None bbox = kwds.get('bbox') mask = kwds.get('mask') if bbox and mask: raise ValueError("mask and bbox can not be set together") self.iterator = ItemsIterator( self, start, stop, step, bbox, mask) return self.iterator def keys(self, *args, **kwds): """Returns an iterator over FIDs, optionally filtered by a test for spatial intersection with the provided ``bbox``, a (minx, miny, maxx, maxy) tuple or a geometry ``mask``. Positional arguments ``stop`` or ``start, stop[, step]`` allows iteration to skip over items or stop at a specific item. 
""" if self.closed: raise ValueError("I/O operation on closed collection") elif self.mode != 'r': raise IOError("collection not open for reading") if args: s = slice(*args) start = s.start stop = s.stop step = s.step else: start = stop = step = None bbox = kwds.get('bbox') mask = kwds.get('mask') if bbox and mask: raise ValueError("mask and bbox can not be set together") self.iterator = KeysIterator( self, start, stop, step, bbox, mask) return self.iterator def __contains__(self, fid): return self.session.has_feature(fid) values = filter def __iter__(self): """Returns an iterator over records.""" return self.filter() def __next__(self): """Returns next record from iterator.""" warnings.warn("Collection.__next__() is buggy and will be removed in " "Fiona 2.0. Switch to `next(iter(collection))`.", DeprecationWarning, stacklevel=2) if not self.iterator: iter(self) return next(self.iterator) next = __next__ def __getitem__(self, item): return self.session.__getitem__(item) def writerecords(self, records): """Stages multiple records for writing to disk.""" if self.closed: raise ValueError("I/O operation on closed collection") if self.mode not in ('a', 'w'): raise IOError("collection not open for writing") self.session.writerecs(records, self) self._len = self.session.get_length() self._bounds = self.session.get_extent() def write(self, record): """Stages a record for writing to disk.""" self.writerecords([record]) def validate_record(self, record): """Compares the record to the collection's schema. Returns ``True`` if the record matches, else ``False``. """ # Currently we only compare keys of properties, not the types of # values. return ( set(record['properties'].keys()) == set(self.schema['properties'].keys()) and self.validate_record_geometry(record)) def validate_record_geometry(self, record): """Compares the record's geometry to the collection's schema. Returns ``True`` if the record matches, else ``False``. 
""" # Shapefiles welcome mixes of line/multis and polygon/multis. # OGR reports these mixed files as type "Polygon" or "LineString" # but will return either these or their multi counterparts when # reading features. if (self.driver == "ESRI Shapefile" and "Point" not in record['geometry']['type']): return record['geometry']['type'].lstrip( "Multi") == self.schema['geometry'].lstrip("3D ").lstrip( "Multi") else: return ( record['geometry']['type'] == self.schema['geometry'].lstrip("3D ")) def __len__(self): if self._len <= 0 and self.session is not None: self._len = self.session.get_length() if self._len < 0: # Raise TypeError when we don't know the length so that Python # will treat Collection as a generator raise TypeError("Layer does not support counting") return self._len @property def bounds(self): """Returns (minx, miny, maxx, maxy).""" if self._bounds is None and self.session is not None: self._bounds = self.session.get_extent() return self._bounds def flush(self): """Flush the buffer.""" if self.session is not None: self.session.sync(self) new_len = self.session.get_length() self._len = new_len > self._len and new_len or self._len self._bounds = self.session.get_extent() def close(self): """In append or write mode, flushes data to disk, then ends access.""" if self.session is not None: if self.mode in ('a', 'w'): self.flush() log.debug("Flushed buffer") self.session.stop() log.debug("Stopped session") self.session = None self.iterator = None if self.env: self.env.__exit__() @property def closed(self): """``False`` if data can be accessed, otherwise ``True``.""" return self.session is None def __enter__(self): return self def __exit__(self, type, value, traceback): self.close() def __del__(self): # Note: you can't count on this being called. Call close() explicitly # or use the context manager protocol ("with"). self.close() def get_filetype(bytesbuf): """Detect compression type of bytesbuf. ZIP only. 
TODO: add others relevant to GDAL/OGR.""" if bytesbuf[:4].startswith(b'PK\x03\x04'): return 'zip' else: return '' class BytesCollection(Collection): """BytesCollection takes a buffer of bytes and maps that to a virtual file that can then be opened by fiona. """ def __init__(self, bytesbuf, **kwds): """Takes buffer of bytes whose contents is something we'd like to open with Fiona and maps it to a virtual file. """ if not isinstance(bytesbuf, binary_type): raise ValueError("input buffer must be bytes") # Hold a reference to the buffer, as bad things will happen if # it is garbage collected while in use. self.bytesbuf = bytesbuf # Map the buffer to a file. If the buffer contains a zipfile # we take extra steps in naming the buffer and in opening # it. If the requested driver is for GeoJSON, we append an an # appropriate extension to ensure the driver reads it. filetype = get_filetype(self.bytesbuf) ext = '' if filetype == 'zip': ext = '.zip' elif kwds.get('driver') == "GeoJSON": ext = '.json' self.virtual_file = buffer_to_virtual_file(self.bytesbuf, ext=ext) # Instantiate the parent class. super(BytesCollection, self).__init__(self.virtual_file, vsi=filetype, encoding='utf-8', **kwds) def close(self): """Removes the virtual file associated with the class.""" super(BytesCollection, self).close() if self.virtual_file: remove_virtual_file(self.virtual_file) self.virtual_file = None self.bytesbuf = None def __repr__(self): return "<%s BytesCollection '%s', mode '%s' at %s>" % ( self.closed and "closed" or "open", self.path + ":" + str(self.name), self.mode, hex(id(self))) def vsi_path(path, vsi=None, archive=None): # If a VSF and archive file are specified, we convert the path to # an OGR VSI path (see cpl_vsi.h). 
if vsi: if archive: result = '/vsi{0}/{1}{2}'.format(vsi, archive, path) else: result = '/vsi{0}/{1}'.format(vsi, path) else: result = path return result Fiona-1.7.10/fiona/compat.py000066400000000000000000000006531317446052300155760ustar00rootroot00000000000000import collections from six.moves import UserDict try: from collections import OrderedDict except ImportError: from ordereddict import OrderedDict # Users can pass in objects that subclass a few different objects # More specifically, rasterio has a CRS() class that subclasses UserDict() # In Python 2 UserDict() is in its own module and does not subclass Mapping() DICT_TYPES = (dict, collections.Mapping, UserDict) Fiona-1.7.10/fiona/crs.py000066400000000000000000000124321317446052300151000ustar00rootroot00000000000000"""Coordinate reference systems and functions PROJ.4 is the law of this land: http://proj.osgeo.org/. But whereas PROJ.4 coordinate reference systems are described by strings of parameters such as +proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs here we use mappings: {'proj': 'longlat', 'ellps': 'WGS84', 'datum': 'WGS84', 'no_defs': True} """ from six import string_types def to_string(crs): """Turn a parameter mapping into a more conventional PROJ.4 string. Mapping keys are tested against the ``all_proj_keys`` list. Values of ``True`` are omitted, leaving the key bare: {'no_defs': True} -> "+no_defs" and items where the value is otherwise not a str, int, or float are omitted. """ items = [] for k, v in sorted(filter( lambda x: x[0] in all_proj_keys and x[1] is not False and ( isinstance(x[1], (bool, int, float)) or isinstance(x[1], string_types)), crs.items())): items.append( "+" + "=".join( map(str, filter( lambda y: (y or y == 0) and y is not True, (k, v))))) return " ".join(items) def from_string(prjs): """Turn a PROJ.4 string into a mapping of parameters. Bare parameters like "+no_defs" are given a value of ``True``. All keys are checked against the ``all_proj_keys`` list. 
""" parts = [o.lstrip('+') for o in prjs.strip().split()] def parse(v): try: return int(v) except ValueError: pass try: return float(v) except ValueError: return v items = map( lambda kv: len(kv) == 2 and (kv[0], parse(kv[1])) or (kv[0], True), (p.split('=') for p in parts)) return dict((k, v) for k, v in items if k in all_proj_keys) def from_epsg(code): """Given an integer code, returns an EPSG-like mapping. Note: the input code is not validated against an EPSG database. """ if int(code) <= 0: raise ValueError("EPSG codes are positive integers") return {'init': "epsg:%s" % code, 'no_defs': True} # Below is the big list of PROJ4 parameters from # http://trac.osgeo.org/proj/wiki/GenParms. # It is parsed into a list of paramter keys ``all_proj_keys``. _param_data = """ +a Semimajor radius of the ellipsoid axis +alpha ? Used with Oblique Mercator and possibly a few others +axis Axis orientation (new in 4.8.0) +b Semiminor radius of the ellipsoid axis +datum Datum name (see `proj -ld`) +ellps Ellipsoid name (see `proj -le`) +init Initialize from a named CRS +k Scaling factor (old name) +k_0 Scaling factor (new name) +lat_0 Latitude of origin +lat_1 Latitude of first standard parallel +lat_2 Latitude of second standard parallel +lat_ts Latitude of true scale +lon_0 Central meridian +lonc ? Longitude used with Oblique Mercator and possibly a few others +lon_wrap Center longitude to use for wrapping (see below) +nadgrids Filename of NTv2 grid file to use for datum transforms (see below) +no_defs Don't use the /usr/share/proj/proj_def.dat defaults file +over Allow longitude output outside -180 to 180 range, disables wrapping (see below) +pm Alternate prime meridian (typically a city name, see below) +proj Projection name (see `proj -l`) +south Denotes southern hemisphere UTM zone +to_meter Multiplier to convert map units to 1.0m +towgs84 3 or 7 term datum transform parameters (see below) +units meters, US survey feet, etc. +vto_meter vertical conversion to meters. 
+vunits vertical units. +x_0 False easting +y_0 False northing +zone UTM zone +a Semimajor radius of the ellipsoid axis +alpha ? Used with Oblique Mercator and possibly a few others +azi +b Semiminor radius of the ellipsoid axis +belgium +beta +czech +e Eccentricity of the ellipsoid = sqrt(1 - b^2/a^2) = sqrt( f*(2-f) ) +ellps Ellipsoid name (see `proj -le`) +es Eccentricity of the ellipsoid squared +f Flattening of the ellipsoid (often presented as an inverse, e.g. 1/298) +gamma +geoc +guam +h +k Scaling factor (old name) +K +k_0 Scaling factor (new name) +lat_0 Latitude of origin +lat_1 Latitude of first standard parallel +lat_2 Latitude of second standard parallel +lat_b +lat_t +lat_ts Latitude of true scale +lon_0 Central meridian +lon_1 +lon_2 +lonc ? Longitude used with Oblique Mercator and possibly a few others +lsat +m +M +n +no_cut +no_off +no_rot +ns +o_alpha +o_lat_1 +o_lat_2 +o_lat_c +o_lat_p +o_lon_1 +o_lon_2 +o_lon_c +o_lon_p +o_proj +over +p +path +proj Projection name (see `proj -l`) +q +R +R_a +R_A Compute radius such that the area of the sphere is the same as the area of the ellipsoid +rf Reciprocal of the ellipsoid flattening term (e.g. 298) +R_g +R_h +R_lat_a +R_lat_g +rot +R_V +s +south Denotes southern hemisphere UTM zone +sym +t +theta +tilt +to_meter Multiplier to convert map units to 1.0m +units meters, US survey feet, etc. +vopt +W +westo +x_0 False easting +y_0 False northing +zone UTM zone +wktext Marker """ _lines = filter(lambda x: len(x) > 1, _param_data.split("\n")) all_proj_keys = list( set(line.split()[0].lstrip("+").strip() for line in _lines)) + ['no_mayo'] Fiona-1.7.10/fiona/drvsupport.py000066400000000000000000000131021317446052300165340ustar00rootroot00000000000000# -*- coding: utf-8 -*- from fiona._drivers import GDALEnv # Here is the list of available drivers as (name, modes) tuples. Currently, # we only expose the defaults (excepting FileGDB). We also don't expose # the CSV or GeoJSON drivers. 
Use Python's csv and json modules instead. # Might still exclude a few more of these after making a pass through the # entries for each at http://www.gdal.org/ogr/ogr_formats.html to screen # out the multi-layer formats. supported_drivers = dict([ #OGR Vector Formats #Format Name Code Creation Georeferencing Compiled by default #Aeronav FAA files AeronavFAA No Yes Yes ("AeronavFAA", "r"), #ESRI ArcObjects ArcObjects No Yes No, needs ESRI ArcObjects #Arc/Info Binary Coverage AVCBin No Yes Yes # multi-layer # ("AVCBin", "r"), #Arc/Info .E00 (ASCII) Coverage AVCE00 No Yes Yes # multi-layer # ("AVCE00", "r"), #Arc/Info Generate ARCGEN No No Yes ("ARCGEN", "r"), #Atlas BNA BNA Yes No Yes ("BNA", "raw"), #AutoCAD DWG DWG No No No #AutoCAD DXF DXF Yes No Yes ("DXF", "raw"), #Comma Separated Value (.csv) CSV Yes No Yes #CouchDB / GeoCouch CouchDB Yes Yes No, needs libcurl #DODS/OPeNDAP DODS No Yes No, needs libdap #EDIGEO EDIGEO No Yes Yes # multi-layer? Hard to tell from the OGR docs # ("EDIGEO", "r"), #ElasticSearch ElasticSearch Yes (write-only) - No, needs libcurl #ESRI FileGDB FileGDB Yes Yes No, needs FileGDB API library # multi-layer ("FileGDB", "raw"), ("OpenFileGDB", "r"), #ESRI Personal GeoDatabase PGeo No Yes No, needs ODBC library #ESRI ArcSDE SDE No Yes No, needs ESRI SDE #ESRI Shapefile ESRI Shapefile Yes Yes Yes ("ESRI Shapefile", "raw"), #FMEObjects Gateway FMEObjects Gateway No Yes No, needs FME #GeoJSON GeoJSON Yes Yes Yes ("GeoJSON", "rw"), #Géoconcept Export Geoconcept Yes Yes Yes # multi-layers # ("Geoconcept", "raw"), #Geomedia .mdb Geomedia No No No, needs ODBC library #GeoPackage GPKG Yes Yes No, needs libsqlite3 ("GPKG", "rw"), #GeoRSS GeoRSS Yes Yes Yes (read support needs libexpat) #Google Fusion Tables GFT Yes Yes No, needs libcurl #GML GML Yes Yes Yes (read support needs Xerces or libexpat) #GMT GMT Yes Yes Yes ("GMT", "raw"), #GPSBabel GPSBabel Yes Yes Yes (needs GPSBabel and GPX driver) #GPX GPX Yes Yes Yes (read support needs libexpat) 
("GPX", "raw"), #GRASS GRASS No Yes No, needs libgrass #GPSTrackMaker (.gtm, .gtz) GPSTrackMaker Yes Yes Yes ("GPSTrackMaker", "raw"), #Hydrographic Transfer Format HTF No Yes Yes # TODO: Fiona is not ready for multi-layer formats: ("HTF", "r"), #Idrisi Vector (.VCT) Idrisi No Yes Yes ("Idrisi", "r"), #Informix DataBlade IDB Yes Yes No, needs Informix DataBlade #INTERLIS "Interlis 1" and "Interlis 2" Yes Yes No, needs Xerces (INTERLIS model reading needs ili2c.jar) #INGRES INGRES Yes No No, needs INGRESS #KML KML Yes Yes Yes (read support needs libexpat) #LIBKML LIBKML Yes Yes No, needs libkml #Mapinfo File MapInfo File Yes Yes Yes ("MapInfo File", "raw"), #Microstation DGN DGN Yes No Yes ("DGN", "raw"), #Access MDB (PGeo and Geomedia capable) MDB No Yes No, needs JDK/JRE #Memory Memory Yes Yes Yes #MySQL MySQL No Yes No, needs MySQL library #NAS - ALKIS NAS No Yes No, needs Xerces #Oracle Spatial OCI Yes Yes No, needs OCI library #ODBC ODBC No Yes No, needs ODBC library #MS SQL Spatial MSSQLSpatial Yes Yes No, needs ODBC library #Open Document Spreadsheet ODS Yes No No, needs libexpat #OGDI Vectors (VPF, VMAP, DCW) OGDI No Yes No, needs OGDI library #OpenAir OpenAir No Yes Yes # multi-layer # ("OpenAir", "r"), #PCI Geomatics Database File PCIDSK No No Yes, using internal PCIDSK SDK (from GDAL 1.7.0) ("PCIDSK", "r"), #PDS PDS No Yes Yes ("PDS", "r"), #PGDump PostgreSQL SQL dump Yes Yes Yes #PostgreSQL/PostGIS PostgreSQL/PostGIS Yes Yes No, needs PostgreSQL client library (libpq) #EPIInfo .REC REC No No Yes #S-57 (ENC) S57 No Yes Yes # multi-layer # ("S57", "r"), #SDTS SDTS No Yes Yes # multi-layer # ("SDTS", "r"), #SEG-P1 / UKOOA P1/90 SEGUKOOA No Yes Yes # multi-layers # ("SEGUKOOA", "r"), #SEG-Y SEGY No No Yes ("SEGY", "r"), #Norwegian SOSI Standard SOSI No Yes No, needs FYBA library #SQLite/SpatiaLite SQLite Yes Yes No, needs libsqlite3 or libspatialite #SUA SUA No Yes Yes ("SUA", "r"), #SVG SVG No Yes No, needs libexpat #UK .NTF UK. 
NTF No Yes Yes # multi-layer # ("UK. NTF", "r"), #U.S. Census TIGER/Line TIGER No Yes Yes # multi-layer # ("TIGER", "r"), #VFK data VFK No Yes Yes # multi-layer # ("VFK", "r"), #VRT - Virtual Datasource VRT No Yes Yes # multi-layer # ("VRT", "r"), #OGC WFS (Web Feature Service) WFS Yes Yes No, needs libcurl #MS Excel format XLS No No No, needs libfreexl #Office Open XML spreadsheet XLSX Yes No No, needs libexpat #X-Plane/Flighgear aeronautical data XPLANE No Yes Yes # multi-layer # ("XPLANE", "r") ]) # Removes drivers in the supported_drivers dictionary that the # machine's installation of OGR due to how it is compiled. # OGR may not have optional libararies compiled or installed. def _filter_supported_drivers(): global supported_drivers gdalenv = GDALEnv() ogrdrv_names = gdalenv.start().drivers().keys() supported_drivers_copy = supported_drivers.copy() for drv in supported_drivers.keys(): if drv not in ogrdrv_names: del supported_drivers_copy[drv] gdalenv.stop() supported_drivers = supported_drivers_copy _filter_supported_drivers() Fiona-1.7.10/fiona/errors.py000066400000000000000000000013711317446052300156250ustar00rootroot00000000000000# Errors. 
class FionaValueError(ValueError):
    """Fiona-specific value errors."""


class DriverError(FionaValueError):
    """Encapsulates unsupported driver and driver mode errors."""


class SchemaError(FionaValueError):
    """When a schema mapping has no properties or no geometry."""


class CRSError(FionaValueError):
    """When a crs mapping has neither init nor proj items."""


class DataIOError(IOError):
    """IO errors involving driver registration or availability."""


class DriverIOError(IOError):
    """A format-specific driver error."""


class FieldNameEncodeError(UnicodeEncodeError):
    """Failure to encode a field name."""


class UnsupportedGeometryTypeError(KeyError):
    """When an OGR geometry type isn't supported by Fiona."""
@click.command(short_help="Print the extent of GeoJSON objects")
@precision_opt
@click.option('--explode/--no-explode', default=False,
              help="Explode collections into features (default: no).")
@click.option('--with-id/--without-id', default=False,
              help="Print GeoJSON ids and bounding boxes together "
                   "(default: without).")
@click.option('--with-obj/--without-obj', default=False,
              help="Print GeoJSON objects and bounding boxes together "
                   "(default: without).")
@use_rs_opt
@click.pass_context
def bounds(ctx, precision, explode, with_id, with_obj, use_rs):
    """Print the bounding boxes of GeoJSON objects read from stdin.

    Optionally explode collections and print the bounds of their
    features.

    To print identifiers for input objects along with their bounds
    as a {id: identifier, bbox: bounds} JSON object, use --with-id.

    To print the input objects themselves along with their bounds
    as GeoJSON object, use --with-obj. This has the effect of updating
    input objects with {id: identifier, bbox: bounds}.
    """
    # NOTE(review): ``verbosity`` and ``stdout`` are computed but never
    # used in this command; output goes through click.echo instead.
    verbosity = (ctx.obj and ctx.obj['verbosity']) or 2
    logger = logging.getLogger('fio')
    stdin = click.get_text_stream('stdin')
    stdout = click.get_text_stream('stdout')
    try:
        source = obj_gen(stdin)
        for i, obj in enumerate(source):
            obj_id = obj.get('id', 'collection:' + str(i))
            # Accumulators for the collection-level bbox (non-explode).
            xs = []
            ys = []
            # A bare feature (no 'features' key) is treated as a
            # one-element collection.
            features = obj.get('features') or [obj]
            for j, feat in enumerate(features):
                # NOTE(review): the fallback id uses ``i`` (the input
                # object index), not ``j`` (the feature index) — every
                # feature of one collection gets the same default id.
                # Looks like it should be ``j``; confirm upstream intent.
                feat_id = feat.get('id', 'feature:' + str(i))
                w, s, e, n = fiona.bounds(feat)
                if precision > 0:
                    w, s, e, n = (round(v, precision)
                                  for v in (w, s, e, n))
                if explode:
                    # Emit one record per feature.
                    if with_id:
                        rec = {
                            'parent': obj_id,
                            'id': feat_id,
                            'bbox': (w, s, e, n)}
                    elif with_obj:
                        feat.update(parent=obj_id, bbox=(w, s, e, n))
                        rec = feat
                    else:
                        rec = (w, s, e, n)
                    if use_rs:
                        # RFC 7464 record separator before each record.
                        click.echo(u'\u001e', nl=False)
                    click.echo(json.dumps(rec))
                else:
                    # Collect corners; the union bbox is emitted below.
                    xs.extend([w, e])
                    ys.extend([s, n])
            if not explode:
                # Union of all feature bboxes for this input object.
                w, s, e, n = (min(xs), min(ys), max(xs), max(ys))
                if with_id:
                    rec = {'id': obj_id, 'bbox': (w, s, e, n)}
                elif with_obj:
                    obj.update(id=obj_id, bbox=(w, s, e, n))
                    rec = obj
                else:
                    rec = (w, s, e, n)
                if use_rs:
                    click.echo(u'\u001e', nl=False)
                click.echo(json.dumps(rec))
    except Exception:
        logger.exception("Exception caught during processing")
        raise click.Abort()
@click.command(short_help="Calculate GeoJSON property by Python expression")
@click.argument('property_name')
@click.argument('expression')
@click.option('--overwrite', is_flag=True, default=False,
              help="Overwrite properties, default: False")
@use_rs_opt
@click.pass_context
def calc(ctx, property_name, expression, overwrite, use_rs):
    """
    Create a new property on GeoJSON features using the specified expression.

    \b
    The expression is evaluated in a restricted namespace containing:
        - sum, pow, min, max and the imported math module
        - shape (optional, imported from shapely.geometry if available)
        - bool, int, str, len, float type conversions
        - f (the feature to be evaluated,
             allows item access via javascript-style dot notation using munch)

    The expression will be evaluated for each feature and its
    return value will be added to the properties
    as the specified property_name. Existing properties will not
    be overwritten by default (an Exception is raised).

    Example

    \b
    $ fio cat data.shp | fio calc sumAB  "f.properties.A + f.properties.B"
    """
    logger = logging.getLogger('fio')
    stdin = click.get_text_stream('stdin')
    try:
        # A bare feature is handled as a single-feature collection.
        for obj in obj_gen(stdin):
            for feat in obj.get('features') or [obj]:
                props = feat['properties']
                # Refuse to clobber an existing property unless asked.
                if property_name in props and not overwrite:
                    raise click.UsageError(
                        '{0} already exists in properties; '
                        'rename or use --overwrite'.format(property_name))
                props[property_name] = eval_feature_expression(
                    feat, expression)
                if use_rs:
                    click.echo(u'\u001e', nl=False)
                click.echo(json.dumps(feat))
    except Exception:
        logger.exception("Exception caught during processing")
        raise click.Abort()
@click.command(short_help="Concatenate and print the features of datasets")
@click.argument('files', nargs=-1, type=click.Path(resolve_path=False),
                required=True, metavar="INPUTS...")
@click.option('--layer', default=None, multiple=True,
              callback=options.cb_multilayer,
              help="Input layer(s), specified as 'fileindex:layer` "
                   "For example, '1:foo,2:bar' will concatenate layer foo "
                   "from file 1 and layer bar from file 2")
@cligj.precision_opt
@cligj.indent_opt
@cligj.compact_opt
@click.option('--ignore-errors/--no-ignore-errors', default=False,
              help="log errors but do not stop serialization.")
@options.dst_crs_opt
@cligj.use_rs_opt
@click.option('--bbox', default=None, metavar="w,s,e,n",
              help="filter for features intersecting a bounding box")
@click.pass_context
def cat(ctx, files, precision, indent, compact, ignore_errors, dst_crs,
        use_rs, bbox, layer):
    """
    Concatenate and print the features of input datasets as a
    sequence of GeoJSON features.

    When working with a multi-layer dataset the first layer is used by default.
    Use the '--layer' option to select a different layer.
    """
    verbosity = (ctx.obj and ctx.obj['verbosity']) or 2
    logger = logging.getLogger('fio')
    dump_kwds = {'sort_keys': True}
    if indent:
        dump_kwds['indent'] = indent
    if compact:
        dump_kwds['separators'] = (',', ':')

    # Validate file indexes provided in --layer option
    # (can't pass the files to option callback)
    if layer:
        options.validate_multilayer_file_index(files, layer)

    # first layer is the default
    for i in range(1, len(files) + 1):
        if str(i) not in layer.keys():
            layer[str(i)] = [0]

    # Parse the bbox filter once, before the file loop.  The original
    # code re-parsed it inside the loop, so with more than one input
    # file the second pass called ``.split`` on the already-parsed
    # tuple and raised AttributeError.
    if bbox:
        try:
            bbox = tuple(map(float, bbox.split(',')))
        except ValueError:
            # Allow a GeoJSON-style bbox array as an alternative.
            bbox = json.loads(bbox)

    try:
        with fiona.drivers(CPL_DEBUG=verbosity > 2):
            for i, path in enumerate(files, 1):
                for lyr in layer[str(i)]:
                    with fiona.open(path, layer=lyr) as src:
                        # Use a distinct loop variable: the original
                        # shadowed the outer file index ``i``.
                        for j, feat in src.items(bbox=bbox):
                            if dst_crs or precision >= 0:
                                g = transform_geom(
                                    src.crs, dst_crs, feat['geometry'],
                                    antimeridian_cutting=True,
                                    precision=precision)
                                feat['geometry'] = g
                                feat['bbox'] = fiona.bounds(g)
                            if use_rs:
                                # RFC 7464 record separator.
                                click.echo(u'\u001e', nl=False)
                            click.echo(json.dumps(feat, **dump_kwds))
    except Exception:
        logger.exception("Exception caught during processing")
        raise click.Abort()
@click.command(short_help="Collect a sequence of features.")
@cligj.precision_opt
@cligj.indent_opt
@cligj.compact_opt
@click.option('--record-buffered/--no-record-buffered', default=False,
              help="Economical buffering of writes at record, not collection "
                   "(default), level.")
@click.option('--ignore-errors/--no-ignore-errors', default=False,
              help="log errors but do not stop serialization.")
@options.src_crs_opt
@click.option('--with-ld-context/--without-ld-context', default=False,
              help="add a JSON-LD context to JSON output.")
@click.option('--add-ld-context-item', multiple=True,
              help="map a term to a URI and add it to the output's JSON LD "
                   "context.")
@click.option('--parse/--no-parse', default=True,
              help="load and dump the geojson feature (default is True)")
@click.pass_context
def collect(ctx, precision, indent, compact, record_buffered, ignore_errors,
            src_crs, with_ld_context, add_ld_context_item, parse):
    """Make a GeoJSON feature collection from a sequence of GeoJSON
    features and print it."""
    verbosity = (ctx.obj and ctx.obj['verbosity']) or 2
    logger = logging.getLogger('fio')
    stdin = click.get_text_stream('stdin')
    sink = click.get_text_stream('stdout')

    dump_kwds = {'sort_keys': True}
    if indent:
        dump_kwds['indent'] = indent
    if compact:
        dump_kwds['separators'] = (',', ':')
    item_sep = compact and ',' or ', '

    if src_crs:
        # Reprojection requires parsed geometries.
        if not parse:
            raise click.UsageError("Can't specify --src-crs with --no-parse")
        transformer = partial(transform_geom, src_crs, 'EPSG:4326',
                              antimeridian_cutting=True, precision=precision)
    else:
        transformer = lambda x: x

    first_line = next(stdin)

    # Each branch below builds ``feature_text_gen``, a generator of
    # feature JSON *text*, from one of four input shapes:
    # parsed vs. pass-through, and RS-delimited (RFC 7464) vs.
    # LF-delimited.

    # If parsing geojson
    if parse:
        # If input is RS-delimited JSON sequence.
        if first_line.startswith(u'\x1e'):
            def feature_text_gen():
                buffer = first_line.strip(u'\x1e')
                for line in stdin:
                    if line.startswith(u'\x1e'):
                        if buffer:
                            feat = json.loads(buffer)
                            feat['geometry'] = transformer(feat['geometry'])
                            yield json.dumps(feat, **dump_kwds)
                        buffer = line.strip(u'\x1e')
                    else:
                        buffer += line
                else:
                    # for-else: flush the final buffered record.
                    feat = json.loads(buffer)
                    feat['geometry'] = transformer(feat['geometry'])
                    yield json.dumps(feat, **dump_kwds)
        else:
            def feature_text_gen():
                feat = json.loads(first_line)
                feat['geometry'] = transformer(feat['geometry'])
                yield json.dumps(feat, **dump_kwds)
                for line in stdin:
                    feat = json.loads(line)
                    feat['geometry'] = transformer(feat['geometry'])
                    yield json.dumps(feat, **dump_kwds)

    # If *not* parsing geojson
    else:
        # If input is RS-delimited JSON sequence.
        if first_line.startswith(u'\x1e'):
            def feature_text_gen():
                buffer = first_line.strip(u'\x1e')
                for line in stdin:
                    if line.startswith(u'\x1e'):
                        if buffer:
                            yield buffer
                        buffer = line.strip(u'\x1e')
                    else:
                        buffer += line
                else:
                    # for-else: flush the final buffered record.
                    yield buffer
        else:
            def feature_text_gen():
                yield first_line
                for line in stdin:
                    yield line

    try:
        source = feature_text_gen()

        if record_buffered:
            # Buffer GeoJSON data at the feature level for smaller
            # memory footprint.
            indented = bool(indent)
            rec_indent = "\n" + " " * (2 * (indent or 0))

            collection = {
                'type': 'FeatureCollection',
                'features': []}
            if with_ld_context:
                collection['@context'] = helpers.make_ld_context(
                    add_ld_context_item)

            # Split the serialized skeleton around the empty features
            # array; features are streamed in between head and tail.
            head, tail = json.dumps(collection, **dump_kwds).split('[]')

            sink.write(head)
            sink.write("[")

            # Try the first record.
            try:
                # NOTE(review): if next(source) raises anything other
                # than StopIteration, ``i`` is unbound in the handler
                # below — confirm and initialize ``i`` first upstream.
                i, first = 0, next(source)
                if with_ld_context:
                    first = helpers.id_record(first)
                if indented:
                    sink.write(rec_indent)
                sink.write(first.replace("\n", rec_indent))
            except StopIteration:
                pass
            except Exception as exc:
                # Ignoring errors is *not* the default.
                if ignore_errors:
                    logger.error(
                        "failed to serialize file record %d (%s), "
                        "continuing",
                        i, exc)
                else:
                    # Log error and close up the GeoJSON, leaving it
                    # more or less valid no matter what happens above.
                    logger.critical(
                        "failed to serialize file record %d (%s), "
                        "quiting",
                        i, exc)
                    sink.write("]")
                    sink.write(tail)
                    if indented:
                        sink.write("\n")
                    raise

            # Because trailing commas aren't valid in JSON arrays
            # we'll write the item separator before each of the
            # remaining features.
            for i, rec in enumerate(source, 1):
                try:
                    if with_ld_context:
                        rec = helpers.id_record(rec)
                    if indented:
                        sink.write(rec_indent)
                    sink.write(item_sep)
                    sink.write(rec.replace("\n", rec_indent))
                except Exception as exc:
                    if ignore_errors:
                        logger.error(
                            "failed to serialize file record %d (%s), "
                            "continuing",
                            i, exc)
                    else:
                        logger.critical(
                            "failed to serialize file record %d (%s), "
                            "quiting",
                            i, exc)
                        sink.write("]")
                        sink.write(tail)
                        if indented:
                            sink.write("\n")
                        raise

            # Close up the GeoJSON after writing all features.
            sink.write("]")
            sink.write(tail)
            if indented:
                sink.write("\n")
        else:
            # Buffer GeoJSON data at the collection level. The default.
            collection = {
                'type': 'FeatureCollection',
                'features': []}
            if with_ld_context:
                collection['@context'] = helpers.make_ld_context(
                    add_ld_context_item)
            head, tail = json.dumps(collection, **dump_kwds).split('[]')
            sink.write(head)
            sink.write("[")
            sink.write(",".join(source))
            sink.write("]")
            sink.write(tail)
            sink.write("\n")

    except Exception:
        logger.exception("Exception caught during processing")
        raise click.Abort()
@click.command()
@cligj.use_rs_opt
@click.pass_context
def distrib(ctx, use_rs):
    """Distribute features from a collection.

    Print the features of GeoJSON objects read from stdin.
    """
    verbosity = (ctx.obj and ctx.obj['verbosity']) or 2
    logger = logging.getLogger('fio')
    stdin = click.get_text_stream('stdin')
    try:
        # Walk every object on stdin; a bare feature behaves as a
        # single-feature collection.
        for index, obj in enumerate(helpers.obj_gen(stdin)):
            parent_id = obj.get('id', 'collection:' + str(index))
            is_collection = obj.get('type') == 'FeatureCollection'
            for feat in obj.get('features') or [obj]:
                if is_collection:
                    feat['parent'] = parent_id
                # Ensure every emitted feature carries an id.
                feat['id'] = feat.get('id', 'feature:' + str(index))
                if use_rs:
                    # RFC 7464 record separator.
                    click.echo(u'\u001e', nl=False)
                click.echo(json.dumps(feat))
    except Exception:
        logger.exception("Exception caught during processing")
        raise click.Abort()
@click.command(short_help="Dump a dataset to GeoJSON.")
@click.argument('input', type=click.Path(), required=True)
@click.option('--layer', metavar="INDEX|NAME", callback=options.cb_layer,
              help="Print information about a specific layer. The first "
                   "layer is used by default. Layers use zero-based "
                   "numbering when accessed by index.")
@click.option('--encoding', help="Specify encoding of the input file.")
@cligj.precision_opt
@cligj.indent_opt
@cligj.compact_opt
@click.option('--record-buffered/--no-record-buffered', default=False,
              help="Economical buffering of writes at record, not collection "
                   "(default), level.")
@click.option('--ignore-errors/--no-ignore-errors', default=False,
              help="log errors but do not stop serialization.")
@click.option('--with-ld-context/--without-ld-context', default=False,
              help="add a JSON-LD context to JSON output.")
@click.option('--add-ld-context-item', multiple=True,
              help="map a term to a URI and add it to the output's JSON LD "
                   "context.")
@click.pass_context
def dump(ctx, input, encoding, precision, indent, compact, record_buffered,
         ignore_errors, with_ld_context, add_ld_context_item, layer):
    """Dump a dataset either as a GeoJSON feature collection (the
    default) or a sequence of GeoJSON features."""
    verbosity = (ctx.obj and ctx.obj['verbosity']) or 2
    logger = logging.getLogger('fio')
    sink = click.get_text_stream('stdout')

    dump_kwds = {'sort_keys': True}
    if indent:
        dump_kwds['indent'] = indent
    if compact:
        dump_kwds['separators'] = (',', ':')
    item_sep = compact and ',' or ', '

    open_kwds = {}
    if encoding:
        open_kwds['encoding'] = encoding
    if layer:
        open_kwds['layer'] = layer

    def transformer(crs, feat):
        # Reproject the feature's geometry from ``crs`` to WGS84.
        tg = partial(transform_geom, crs, 'EPSG:4326',
                     antimeridian_cutting=True, precision=precision)
        feat['geometry'] = tg(feat['geometry'])
        return feat

    try:
        with fiona.drivers(CPL_DEBUG=verbosity > 2):
            with fiona.open(input, **open_kwds) as source:
                meta = source.meta
                meta['fields'] = dict(source.schema['properties'].items())

                if record_buffered:
                    # Buffer GeoJSON data at the feature level for smaller
                    # memory footprint.
                    indented = bool(indent)
                    rec_indent = "\n" + " " * (2 * (indent or 0))

                    collection = {
                        'type': 'FeatureCollection',
                        'fiona:schema': meta['schema'],
                        'fiona:crs': meta['crs'],
                        'features': []}
                    if with_ld_context:
                        collection['@context'] = helpers.make_ld_context(
                            add_ld_context_item)

                    head, tail = json.dumps(
                        collection, **dump_kwds).split('[]')

                    sink.write(head)
                    sink.write("[")

                    itr = iter(source)

                    # Try the first record.
                    try:
                        # Initialize ``i`` before next() so the except
                        # clause below never sees it unbound.
                        i = 0
                        first = next(itr)
                        # Bug fix: ``transformer`` takes (crs, feat);
                        # the original called it with the feature only,
                        # raising TypeError in this branch.
                        first = transformer(source.crs, first)
                        if with_ld_context:
                            first = helpers.id_record(first)
                        if indented:
                            sink.write(rec_indent)
                        sink.write(json.dumps(
                            first, **dump_kwds).replace("\n", rec_indent))
                    except StopIteration:
                        pass
                    except Exception as exc:
                        # Ignoring errors is *not* the default.
                        if ignore_errors:
                            logger.error(
                                "failed to serialize file record %d (%s), "
                                "continuing",
                                i, exc)
                        else:
                            # Log error and close up the GeoJSON, leaving it
                            # more or less valid no matter what happens above.
                            logger.critical(
                                "failed to serialize file record %d (%s), "
                                "quiting",
                                i, exc)
                            sink.write("]")
                            sink.write(tail)
                            if indented:
                                sink.write("\n")
                            raise

                    # Because trailing commas aren't valid in JSON arrays
                    # we'll write the item separator before each of the
                    # remaining features.
                    for i, rec in enumerate(itr, 1):
                        # Bug fix: pass the source CRS (see above).
                        rec = transformer(source.crs, rec)
                        try:
                            if with_ld_context:
                                rec = helpers.id_record(rec)
                            if indented:
                                sink.write(rec_indent)
                            sink.write(item_sep)
                            sink.write(json.dumps(
                                rec, **dump_kwds).replace("\n", rec_indent))
                        except Exception as exc:
                            if ignore_errors:
                                logger.error(
                                    "failed to serialize file record %d (%s), "
                                    "continuing",
                                    i, exc)
                            else:
                                logger.critical(
                                    "failed to serialize file record %d (%s), "
                                    "quiting",
                                    i, exc)
                                sink.write("]")
                                sink.write(tail)
                                if indented:
                                    sink.write("\n")
                                raise

                    # Close up the GeoJSON after writing all features.
                    sink.write("]")
                    sink.write(tail)
                    if indented:
                        sink.write("\n")
                else:
                    # Buffer GeoJSON data at the collection level. The default.
                    collection = {
                        'type': 'FeatureCollection',
                        'fiona:schema': meta['schema'],
                        'fiona:crs': meta['crs']}
                    if with_ld_context:
                        collection['@context'] = helpers.make_ld_context(
                            add_ld_context_item)
                        # Bug fix: pass the source CRS here as well; the
                        # original one-argument call raised TypeError.
                        collection['features'] = [
                            helpers.id_record(transformer(source.crs, rec))
                            for rec in source]
                    else:
                        collection['features'] = [
                            transformer(source.crs, rec) for rec in source]
                    json.dump(collection, sink, **dump_kwds)
    except Exception:
        logger.exception("Exception caught during processing")
        raise click.Abort()
@click.command(short_help="Print information about the fio environment.")
@click.option('--formats', 'key', flag_value='formats', default=True,
              help="Enumerate the available formats.")
@click.pass_context
def env(ctx, key):
    """Print information about the Fiona environment: available
    formats, etc.
    """
    verbosity = (ctx.obj and ctx.obj.get('verbosity')) or 1
    logger = logging.getLogger('fio')
    stdout = click.get_text_stream('stdout')
    with fiona.drivers(CPL_DEBUG=(verbosity > 2)) as env:
        if key == 'formats':
            # One line per driver, with its supported modes quoted.
            for name, modes in sorted(fiona.supported_drivers.items()):
                mode_list = ', '.join("'" + mode + "'" for mode in modes)
                stdout.write("%s (modes %s)\n" % (name, mode_list))
            stdout.write('\n')
@click.command()
@click.argument('filter_expression')
@use_rs_opt
@click.pass_context
def filter(ctx, filter_expression, use_rs):
    """
    Filter GeoJSON features by python expression.

    Features are read from stdin.

    The expression is evaluated in a restricted namespace containing:
        - sum, pow, min, max and the imported math module
        - shape (optional, imported from shapely.geometry if available)
        - bool, int, str, len, float type conversions
        - f (the feature to be evaluated,
             allows item access via javascript-style dot notation using munch)

    The expression will be evaluated for each feature and, if true,
    the feature will be included in the output.

    e.g. fio cat data.shp \
         | fio filter "f.properties.area > 1000.0" \
         | fio collect > large_polygons.geojson
    """
    # NOTE: the command name intentionally shadows the ``filter``
    # builtin; it is only ever invoked via the click entry point.
    logger = logging.getLogger('fio')
    stdin = click.get_text_stream('stdin')
    try:
        source = obj_gen(stdin)
        for i, obj in enumerate(source):
            # A bare feature is treated as a one-element collection.
            features = obj.get('features') or [obj]
            for j, feat in enumerate(features):
                # SECURITY: the expression is eval()'d (see
                # eval_feature_expression); do not feed untrusted
                # expressions to this command.
                if not eval_feature_expression(feat, filter_expression):
                    continue
                if use_rs:
                    # RFC 7464 record separator.
                    click.echo(u'\u001e', nl=False)
                click.echo(json.dumps(feat))
    except Exception:
        logger.exception("Exception caught during processing")
        raise click.Abort()
def obj_gen(lines):
    """Return a generator of JSON objects loaded from ``lines``.

    Supports both plain line-delimited JSON and RFC 7464 style
    RS (U+001E)-delimited sequences, detected from the first line.
    """
    first_line = next(lines)
    if first_line.startswith(u'\x1e'):
        def gen():
            buffer = first_line.strip(u'\x1e')
            for line in lines:
                if line.startswith(u'\x1e'):
                    # A new RS begins the next record; flush the
                    # previous one if any was accumulated.
                    if buffer:
                        yield json.loads(buffer)
                    buffer = line.strip(u'\x1e')
                else:
                    # Continuation of a (possibly pretty-printed)
                    # multi-line record.
                    buffer += line
            else:
                # for-else: flush the final buffered record.
                yield json.loads(buffer)
    else:
        def gen():
            yield json.loads(first_line)
            for line in lines:
                yield json.loads(line)
    return gen()


def nullable(val, cast):
    # Apply ``cast`` to ``val``, passing None through unchanged.
    if val is None:
        return None
    else:
        return cast(val)


def eval_feature_expression(feature, expression):
    """Evaluate ``expression`` against ``feature`` in a restricted
    namespace and return the result.

    SECURITY: this uses eval(). The namespace is restricted (no
    builtins beyond the whitelist below), but expressions should
    still come only from the trusting CLI user, never from
    untrusted input.
    """
    safe_dict = {'f': munchify(feature)}
    safe_dict.update({
        'sum': sum,
        'pow': pow,
        'min': min,
        'max': max,
        'math': math,
        'bool': bool,
        'int': partial(nullable, int),
        'str': partial(nullable, str),
        'float': partial(nullable, float),
        'len': partial(nullable, len),
    })
    try:
        from shapely.geometry import shape
        safe_dict['shape'] = shape
    except ImportError:
        pass
    return eval(expression, {"__builtins__": None}, safe_dict)
def make_ld_context(context_items):
    """Returns a JSON-LD Context object.

    See http://json-ld.org/spec/latest/json-ld.

    ``context_items`` is an iterable of "term=URI" strings; each adds
    a term mapping to the returned object.
    """
    ctx = {
        "@context": {
            "geojson": "http://ld.geojson.org/vocab#",
            "Feature": "geojson:Feature",
            "FeatureCollection": "geojson:FeatureCollection",
            "GeometryCollection": "geojson:GeometryCollection",
            "LineString": "geojson:LineString",
            "MultiLineString": "geojson:MultiLineString",
            "MultiPoint": "geojson:MultiPoint",
            "MultiPolygon": "geojson:MultiPolygon",
            "Point": "geojson:Point",
            "Polygon": "geojson:Polygon",
            "bbox": {
                "@container": "@list",
                "@id": "geojson:bbox"
            },
            "coordinates": "geojson:coordinates",
            "datetime": "http://www.w3.org/2006/time#inXSDDateTime",
            "description": "http://purl.org/dc/terms/description",
            "features": {
                "@container": "@set",
                "@id": "geojson:features"
            },
            "geometry": "geojson:geometry",
            "id": "@id",
            "properties": "geojson:properties",
            "start": "http://www.w3.org/2006/time#hasBeginning",
            "stop": "http://www.w3.org/2006/time#hasEnding",
            "title": "http://purl.org/dc/terms/title",
            "type": "@type",
            "when": "geojson:when"
        }
    }
    for item in context_items or []:
        # Split on the first '=' only: URIs commonly contain '='
        # (query strings), which made the original split() raise
        # ValueError.
        t, uri = item.split("=", 1)
        # NOTE(review): custom terms are added beside "@context", not
        # inside it, while the defaults live inside — looks
        # inconsistent; confirm intended structure before changing.
        ctx[t.strip()] = uri.strip()
    return ctx


def id_record(rec):
    """Converts a record's id to a blank node id and returns the record."""
    rec['id'] = '_:f%s' % rec['id']
    return rec
@click.command()
# One or more files.
@click.argument('input', type=click.Path(exists=False))
@click.option('--layer', metavar="INDEX|NAME", callback=options.cb_layer,
              help="Print information about a specific layer. The first "
                   "layer is used by default. Layers use zero-based "
                   "numbering when accessed by index.")
@indent_opt
# Options to pick out a single metadata item and print it as
# a string.
@click.option('--count', 'meta_member', flag_value='count',
              help="Print the count of features.")
@click.option('-f', '--format', '--driver', 'meta_member', flag_value='driver',
              help="Print the format driver.")
@click.option('--crs', 'meta_member', flag_value='crs',
              help="Print the CRS as a PROJ.4 string.")
@click.option('--bounds', 'meta_member', flag_value='bounds',
              help="Print the boundary coordinates "
                   "(left, bottom, right, top).")
@click.option('--name', 'meta_member', flag_value='name',
              help="Print the datasource's name.")
@click.pass_context
def info(ctx, input, indent, meta_member, layer):
    """
    Print information about a dataset.

    When working with a multi-layer dataset the first layer is used
    by default. Use the '--layer' option to select a different layer.
    """
    verbosity = (ctx.obj and ctx.obj['verbosity']) or 2
    logger = logging.getLogger('fio')
    try:
        with fiona.drivers(CPL_DEBUG=verbosity > 2):
            with fiona.open(input, layer=layer) as src:
                info = src.meta
                info.update(bounds=src.bounds, name=src.name)
                try:
                    info.update(count=len(src))
                except TypeError:
                    # Some drivers cannot count features; report null.
                    info.update(count=None)
                    logger.debug("Setting 'count' to None/null - layer does "
                                 "not support counting")
                proj4 = fiona.crs.to_string(src.crs)
                if proj4.startswith('+init=epsg'):
                    # Show bare EPSG codes as e.g. 'EPSG:4326'.
                    proj4 = proj4.split('=')[1].upper()
                info['crs'] = proj4
                if meta_member:
                    # A single metadata item was requested; print it
                    # as plain text rather than JSON.
                    if isinstance(info[meta_member], (list, tuple)):
                        click.echo(" ".join(map(str, info[meta_member])))
                    else:
                        click.echo(info[meta_member])
                else:
                    click.echo(json.dumps(info, indent=indent))
    except Exception:
        logger.exception("Exception caught during processing")
        raise click.Abort()
@click.command(short_help="Open a dataset and start an interpreter.")
@click.argument('src_path', type=click.Path(exists=True))
@click.option('--ipython', 'interpreter', flag_value='ipython',
              help="Use IPython as interpreter.")
@click.pass_context
def insp(ctx, src_path, interpreter):
    """Open a dataset and drop into an interactive interpreter with
    the collection bound to ``src``."""
    verbosity = (ctx.obj and ctx.obj['verbosity']) or 2
    logger = logging.getLogger('fio')
    banner = 'Fiona %s Interactive Inspector (Python %s)\n' \
             'Type "src.schema", "next(src)", or "help(src)" ' \
             'for more information.' \
             % (fiona.__version__, '.'.join(map(str, sys.version_info[:3])))
    try:
        with fiona.drivers(CPL_DEBUG=verbosity > 2):
            with fiona.open(src_path) as src:
                # Expose local names (including ``src``) to the shell.
                scope = locals()
                if not interpreter:
                    code.interact(banner, local=scope)
                elif interpreter == 'ipython':
                    # Imported lazily: IPython is an optional extra.
                    import IPython
                    IPython.InteractiveShell.banner1 = banner
                    IPython.start_ipython(argv=[], user_ns=scope)
                else:
                    raise click.ClickException(
                        'Interpreter {} is unsupported or missing '
                        'dependencies'.format(interpreter))
    except Exception:
        logger.exception("Exception caught during processing")
        raise click.Abort()
@click.command(short_help="Load GeoJSON to a dataset in another format.")
@click.argument('output', type=click.Path(), required=True)
@click.option('-f', '--format', '--driver', required=True,
              help="Output format driver name.")
@options.src_crs_opt
@click.option('--dst-crs', '--dst_crs',
              help="Destination CRS. Defaults to --src-crs when not given.")
@click.option('--sequence / --no-sequence', default=False,
              help="Specify whether the input stream is a LF-delimited "
                   "sequence of GeoJSON features (the default) or a single "
                   "GeoJSON feature collection.")
@click.option('--layer', metavar="INDEX|NAME", callback=options.cb_layer,
              help="Load features into specified layer. Layers use "
                   "zero-based numbering when accessed by index.")
@click.pass_context
def load(ctx, output, driver, src_crs, dst_crs, sequence, layer):
    """Load features from JSON to a file in another format.

    The input is a GeoJSON feature collection or optionally a sequence of
    GeoJSON feature objects."""
    verbosity = (ctx.obj and ctx.obj['verbosity']) or 2
    logger = logging.getLogger('fio')
    stdin = click.get_text_stream('stdin')

    dst_crs = dst_crs or src_crs

    # Reproject only when both CRSs are known and differ.
    if src_crs and dst_crs and src_crs != dst_crs:
        transformer = partial(transform_geom, src_crs, dst_crs,
                              antimeridian_cutting=True, precision=-1)
    else:
        transformer = lambda x: x

    first_line = next(stdin)

    # If input is RS-delimited JSON sequence.
    if first_line.startswith(u'\x1e'):
        def feature_gen():
            buffer = first_line.strip(u'\x1e')
            for line in stdin:
                if line.startswith(u'\x1e'):
                    if buffer:
                        feat = json.loads(buffer)
                        feat['geometry'] = transformer(feat['geometry'])
                        yield feat
                    buffer = line.strip(u'\x1e')
                else:
                    buffer += line
            else:
                # for-else: flush the final buffered record.
                feat = json.loads(buffer)
                feat['geometry'] = transformer(feat['geometry'])
                yield feat
    elif sequence:
        def feature_gen():
            # Bug fix: the original yielded the first feature without
            # running its geometry through ``transformer``, so the
            # first record was written untransformed whenever
            # --src-crs and --dst-crs differed.
            feat = json.loads(first_line)
            feat['geometry'] = transformer(feat['geometry'])
            yield feat
            for line in stdin:
                feat = json.loads(line)
                feat['geometry'] = transformer(feat['geometry'])
                yield feat
    else:
        def feature_gen():
            text = "".join(itertools.chain([first_line], stdin))
            for feat in json.loads(text)['features']:
                feat['geometry'] = transformer(feat['geometry'])
                yield feat

    try:
        source = feature_gen()

        # Use schema of first feature as a template.
        # TODO: schema specified on command line?
        first = next(source)
        schema = {'geometry': first['geometry']['type']}
        schema['properties'] = dict([
            (k, FIELD_TYPES_MAP_REV.get(type(v)) or 'str')
            for k, v in first['properties'].items()])

        with fiona.drivers(CPL_DEBUG=verbosity > 2):
            with fiona.open(
                    output, 'w',
                    driver=driver,
                    crs=dst_crs,
                    schema=schema,
                    layer=layer) as dst:
                dst.write(first)
                dst.writerecords(source)
    except Exception:
        logger.exception("Exception caught during processing")
        raise click.Abort()
@click.command()
@click.argument('input', type=click.Path(exists=True))
@indent_opt
@click.pass_context
def ls(ctx, input, indent):
    """
    List layers in a datasource.
    """
    # Derive the GDAL debug flag from accumulated CLI verbosity.
    verbosity = (ctx.obj and ctx.obj['verbosity']) or 2
    with fiona.drivers(CPL_DEBUG=verbosity > 2):
        layer_names = fiona.listlayers(input)
        click.echo(json.dumps(layer_names, indent=indent))
def cb_layer(ctx, param, value):
    """Let --layer be a name or index.

    Digit strings become zero-based integer indexes; anything else
    (including None) is passed through as a layer name.
    """
    if value is None or not value.isdigit():
        return value
    else:
        return int(value)


def cb_multilayer(ctx, param, value):
    """
    Transform layer options from strings ("1:a,1:b", "2:a,2:c,2:z") to
    {
        '1': ['a', 'b'],
        '2': ['a', 'c', 'z']
    }
    """
    out = defaultdict(list)
    for raw in value:
        for v in raw.split(','):
            # Split on the first ':' only so that layer names which
            # themselves contain ':' are accepted; the original
            # unbounded split raised ValueError on them.
            ds, name = v.split(':', 1)
            out[ds].append(name)
    return out


def validate_multilayer_file_index(files, layerdict):
    """
    Ensure file indexes provided in the --layer option are valid.

    Raises click.BadParameter for any index outside 1..len(files).
    """
    for key in layerdict.keys():
        if key not in [str(k) for k in range(1, len(files) + 1)]:
            layer = key + ":" + layerdict[key][0]
            raise click.BadParameter("Layer {} does not exist".format(layer))
exist".format(layer)) Fiona-1.7.10/fiona/inspector.py000066400000000000000000000017141317446052300163200ustar00rootroot00000000000000 import code import logging import sys import fiona logging.basicConfig(stream=sys.stderr, level=logging.INFO) logger = logging.getLogger('fiona.inspector') def main(srcfile): with fiona.drivers(): with fiona.open(srcfile) as src: code.interact( 'Fiona %s Interactive Inspector (Python %s)\n' 'Type "src.schema", "next(src)", or "help(src)" ' 'for more information.' % ( fiona.__version__, '.'.join(map(str, sys.version_info[:3]))), local=locals()) return 1 if __name__ == '__main__': import argparse parser = argparse.ArgumentParser( prog="python -m fiona.inspector", description="Open a data file and drop into an interactive interpreter") parser.add_argument( 'src', metavar='FILE', help="Input dataset file name") args = parser.parse_args() main(args.src) Fiona-1.7.10/fiona/ogrext1.pxd000066400000000000000000000163171317446052300160530ustar00rootroot00000000000000# Copyright (c) 2007, Sean C. Gillies # All rights reserved. 
# See ../LICENSE.txt cdef extern from "gdal.h": char * GDALVersionInfo (char *pszRequest) cdef extern from "gdal_version.h": int GDAL_COMPUTE_VERSION(int maj, int min, int rev) cdef extern from "cpl_conv.h": void * CPLMalloc (size_t) void CPLFree (void *ptr) void CPLSetThreadLocalConfigOption (char *key, char *val) void CPLSetConfigOption (char *key, char *val) const char *CPLGetConfigOption (char *, char *) cdef extern from "cpl_string.h": char ** CSLSetNameValue (char **list, char *name, char *value) void CSLDestroy (char **list) cdef extern from "cpl_vsi.h": ctypedef struct VSILFILE: pass int VSIFCloseL (VSILFILE *) VSILFILE * VSIFileFromMemBuffer (const char * filename, unsigned char * data, int data_len, int take_ownership) int VSIUnlink (const char * pathname) ctypedef int OGRErr ctypedef struct OGREnvelope: double MinX double MaxX double MinY double MaxY cdef extern from "ogr_core.h": char * OGRGeometryTypeToName(int) cdef extern from "ogr_srs_api.h": ctypedef void * OGRSpatialReferenceH void OSRCleanup () OGRSpatialReferenceH OSRClone (OGRSpatialReferenceH srs) int OSRExportToProj4 (OGRSpatialReferenceH srs, char **params) int OSRExportToWkt (OGRSpatialReferenceH srs, char **params) int OSRImportFromEPSG (OGRSpatialReferenceH, int code) int OSRImportFromProj4 (OGRSpatialReferenceH srs, const char *proj) int OSRSetFromUserInput (OGRSpatialReferenceH srs, const char *input) int OSRAutoIdentifyEPSG (OGRSpatialReferenceH srs) int OSRFixup(OGRSpatialReferenceH srs) const char * OSRGetAuthorityName (OGRSpatialReferenceH srs, const char *key) const char * OSRGetAuthorityCode (OGRSpatialReferenceH srs, const char *key) OGRSpatialReferenceH OSRNewSpatialReference (char *wkt) void OSRRelease (OGRSpatialReferenceH srs) void * OCTNewCoordinateTransformation (OGRSpatialReferenceH source, OGRSpatialReferenceH dest) void OCTDestroyCoordinateTransformation (void *source) int OCTTransform (void *ct, int nCount, double *x, double *y, double *z) cdef extern from "ogr_api.h": 
char * OGR_Dr_GetName (void *driver) void * OGR_Dr_CreateDataSource (void *driver, const char *path, char **options) int OGR_Dr_DeleteDataSource (void *driver, char *) void * OGR_Dr_Open (void *driver, const char *path, int bupdate) int OGR_Dr_TestCapability (void *driver, const char *) int OGR_DS_DeleteLayer (void *datasource, int n) void * OGR_DS_CreateLayer (void *datasource, char *name, void *crs, int geomType, char **options) void * OGR_DS_ExecuteSQL (void *datasource, char *name, void *filter, char *dialext) void OGR_DS_Destroy (void *datasource) void * OGR_DS_GetDriver (void *layer_defn) void * OGR_DS_GetLayerByName (void *datasource, char *name) int OGR_DS_GetLayerCount (void *datasource) void * OGR_DS_GetLayer (void *datasource, int n) void OGR_DS_ReleaseResultSet (void *datasource, void *results) int OGR_DS_SyncToDisk (void *datasource) void * OGR_F_Create (void *featuredefn) void OGR_F_Destroy (void *feature) long OGR_F_GetFID (void *feature) int OGR_F_IsFieldSet (void *feature, int n) int OGR_F_GetFieldAsDateTime (void *feature, int n, int *y, int *m, int *d, int *h, int *m, int *s, int *z) double OGR_F_GetFieldAsDouble (void *feature, int n) int OGR_F_GetFieldAsInteger (void *feature, int n) char * OGR_F_GetFieldAsString (void *feature, int n) int OGR_F_GetFieldCount (void *feature) void * OGR_F_GetFieldDefnRef (void *feature, int n) int OGR_F_GetFieldIndex (void *feature, char *name) void * OGR_F_GetGeometryRef (void *feature) void OGR_F_SetFieldDateTime (void *feature, int n, int y, int m, int d, int hh, int mm, int ss, int tz) void OGR_F_SetFieldDouble (void *feature, int n, double value) void OGR_F_SetFieldInteger (void *feature, int n, int value) void OGR_F_SetFieldString (void *feature, int n, char *value) int OGR_F_SetGeometryDirectly (void *feature, void *geometry) void * OGR_FD_Create (char *name) int OGR_FD_GetFieldCount (void *featuredefn) void * OGR_FD_GetFieldDefn (void *featuredefn, int n) int OGR_FD_GetGeomType (void *featuredefn) char * 
OGR_FD_GetName (void *featuredefn) void * OGR_Fld_Create (char *name, int fieldtype) void OGR_Fld_Destroy (void *fielddefn) char * OGR_Fld_GetNameRef (void *fielddefn) int OGR_Fld_GetPrecision (void *fielddefn) int OGR_Fld_GetType (void *fielddefn) int OGR_Fld_GetWidth (void *fielddefn) void OGR_Fld_Set (void *fielddefn, char *name, int fieldtype, int width, int precision, int justification) void OGR_Fld_SetPrecision (void *fielddefn, int n) void OGR_Fld_SetWidth (void *fielddefn, int n) OGRErr OGR_G_AddGeometryDirectly (void *geometry, void *part) void OGR_G_AddPoint (void *geometry, double x, double y, double z) void OGR_G_AddPoint_2D (void *geometry, double x, double y) void OGR_G_CloseRings (void *geometry) void * OGR_G_CreateGeometry (int wkbtypecode) void OGR_G_DestroyGeometry (void *geometry) unsigned char * OGR_G_ExportToJson (void *geometry) void OGR_G_ExportToWkb (void *geometry, int endianness, char *buffer) int OGR_G_GetCoordinateDimension (void *geometry) int OGR_G_GetGeometryCount (void *geometry) unsigned char * OGR_G_GetGeometryName (void *geometry) int OGR_G_GetGeometryType (void *geometry) void * OGR_G_GetGeometryRef (void *geometry, int n) int OGR_G_GetPointCount (void *geometry) double OGR_G_GetX (void *geometry, int n) double OGR_G_GetY (void *geometry, int n) double OGR_G_GetZ (void *geometry, int n) void OGR_G_ImportFromWkb (void *geometry, unsigned char *bytes, int nbytes) int OGR_G_WkbSize (void *geometry) OGRErr OGR_L_CreateFeature (void *layer, void *feature) int OGR_L_CreateField (void *layer, void *fielddefn, int flexible) OGRErr OGR_L_GetExtent (void *layer, void *extent, int force) void * OGR_L_GetFeature (void *layer, int n) int OGR_L_GetFeatureCount (void *layer, int m) void * OGR_L_GetLayerDefn (void *layer) char * OGR_L_GetName (void *layer) void * OGR_L_GetNextFeature (void *layer) void * OGR_L_GetSpatialFilter (void *layer) void * OGR_L_GetSpatialRef (void *layer) void OGR_L_ResetReading (void *layer) void OGR_L_SetSpatialFilter 
(void *layer, void *geometry) void OGR_L_SetSpatialFilterRect ( void *layer, double minx, double miny, double maxx, double maxy ) int OGR_L_TestCapability (void *layer, char *name) void * OGRGetDriverByName (char *) void * OGROpen (char *path, int mode, void *x) void * OGROpenShared (char *path, int mode, void *x) int OGRReleaseDataSource (void *datasource) OGRErr OGR_L_SetNextByIndex (void *layer, long nIndex) Fiona-1.7.10/fiona/ogrext1.pyx000066400000000000000000001352211317446052300160740ustar00rootroot00000000000000# These are extension functions and classes using the OGR C API. from __future__ import absolute_import import datetime import json import locale import logging import os import warnings import math import uuid from six import integer_types, string_types, text_type from fiona cimport ogrext1 from fiona._geometry cimport ( GeomBuilder, OGRGeomBuilder, geometry_type_code, normalize_geometry_type_code) from fiona._err cimport exc_wrap_pointer from fiona._err import cpl_errs from fiona._geometry import GEOMETRY_TYPES from fiona import compat from fiona.errors import ( DriverError, DriverIOError, SchemaError, CRSError, FionaValueError) from fiona.compat import OrderedDict from fiona.rfc3339 import parse_date, parse_datetime, parse_time from fiona.rfc3339 import FionaDateType, FionaDateTimeType, FionaTimeType log = logging.getLogger("Fiona") class NullHandler(logging.Handler): def emit(self, record): pass log.addHandler(NullHandler()) # Mapping of OGR integer field types to Fiona field type names. # # Lists are currently unsupported in this version, but might be done as # arrays in a future version. 
FIELD_TYPES = [ 'int', # OFTInteger, Simple 32bit integer None, # OFTIntegerList, List of 32bit integers 'float', # OFTReal, Double Precision floating point None, # OFTRealList, List of doubles 'str', # OFTString, String of ASCII chars None, # OFTStringList, Array of strings None, # OFTWideString, deprecated None, # OFTWideStringList, deprecated None, # OFTBinary, Raw Binary data 'date', # OFTDate, Date 'time', # OFTTime, Time 'datetime', # OFTDateTime, Date and Time 'int', # OFTInteger64, Single 64bit integer #Not supported None, # OFTInteger64List, List of 64bit integers #Not supported ] # Mapping of Fiona field type names to Python types. FIELD_TYPES_MAP = { 'int': int, 'float': float, 'str': text_type, 'date': FionaDateType, 'time': FionaTimeType, 'datetime': FionaDateTimeType } # OGR Driver capability cdef const char * ODrCCreateDataSource = "CreateDataSource" cdef const char * ODrCDeleteDataSource = "DeleteDataSource" # OGR Layer capability cdef const char * OLC_RANDOMREAD = "RandomRead" cdef const char * OLC_SEQUENTIALWRITE = "SequentialWrite" cdef const char * OLC_RANDOMWRITE = "RandomWrite" cdef const char * OLC_FASTSPATIALFILTER = "FastSpatialFilter" cdef const char * OLC_FASTFEATURECOUNT = "FastFeatureCount" cdef const char * OLC_FASTGETEXTENT = "FastGetExtent" cdef const char * OLC_FASTSETNEXTBYINDEX = "FastSetNextByIndex" cdef const char * OLC_CREATEFIELD = "CreateField" cdef const char * OLC_CREATEGEOMFIELD = "CreateGeomField" cdef const char * OLC_DELETEFIELD = "DeleteField" cdef const char * OLC_REORDERFIELDS = "ReorderFields" cdef const char * OLC_ALTERFIELDDEFN = "AlterFieldDefn" cdef const char * OLC_DELETEFEATURE = "DeleteFeature" cdef const char * OLC_STRINGSASUTF8 = "StringsAsUTF8" cdef const char * OLC_TRANSACTIONS = "Transactions" # OGR integer error types. 
OGRERR_NONE = 0 OGRERR_NOT_ENOUGH_DATA = 1 # not enough data to deserialize */ OGRERR_NOT_ENOUGH_MEMORY = 2 OGRERR_UNSUPPORTED_GEOMETRY_TYPE = 3 OGRERR_UNSUPPORTED_OPERATION = 4 OGRERR_CORRUPT_DATA = 5 OGRERR_FAILURE = 6 OGRERR_UNSUPPORTED_SRS = 7 OGRERR_INVALID_HANDLE = 8 def _explode(coords): """Explode a GeoJSON geometry's coordinates object and yield coordinate tuples. As long as the input is conforming, the type of the geometry doesn't matter.""" for e in coords: if isinstance(e, (float, int)): yield coords break else: for f in _explode(e): yield f def _bounds(geometry): """Bounding box of a GeoJSON geometry""" try: xyz = tuple(zip(*list(_explode(geometry['coordinates'])))) return min(xyz[0]), min(xyz[1]), max(xyz[0]), max(xyz[1]) except (KeyError, TypeError): return None def calc_gdal_version_num(maj, min, rev): """Calculates the internal gdal version number based on major, minor and revision""" return int(maj * 1000000 + min * 10000 + rev*100) def get_gdal_version_num(): """Return current internal version number of gdal""" return int(ogrext1.GDALVersionInfo("VERSION_NUM")) def get_gdal_release_name(): """Return release name of gdal""" return ogrext1.GDALVersionInfo("RELEASE_NAME") # Feature extension classes and functions follow. cdef class FeatureBuilder: """Build Fiona features from OGR feature pointers. No OGR objects are allocated by this function and the feature argument is not destroyed. 
""" cdef build(self, void *feature, encoding='utf-8', bbox=False, driver=None): # The only method anyone ever needs to call cdef void *fdefn cdef int i cdef int y = 0 cdef int m = 0 cdef int d = 0 cdef int hh = 0 cdef int mm = 0 cdef int ss = 0 cdef int tz = 0 cdef int retval cdef char *key_c props = OrderedDict() for i in range(ogrext1.OGR_F_GetFieldCount(feature)): fdefn = ogrext1.OGR_F_GetFieldDefnRef(feature, i) if fdefn == NULL: raise ValueError("Null feature definition") key_c = ogrext1.OGR_Fld_GetNameRef(fdefn) if key_c == NULL: raise ValueError("Null field name reference") key_b = key_c key = key_b.decode(encoding) fieldtypename = FIELD_TYPES[ogrext1.OGR_Fld_GetType(fdefn)] if not fieldtypename: log.warning( "Skipping field %s: invalid type %s", key, ogrext1.OGR_Fld_GetType(fdefn)) continue # TODO: other types fieldtype = FIELD_TYPES_MAP[fieldtypename] if not ogrext1.OGR_F_IsFieldSet(feature, i): props[key] = None elif fieldtype is int: props[key] = ogrext1.OGR_F_GetFieldAsInteger(feature, i) elif fieldtype is float: props[key] = ogrext1.OGR_F_GetFieldAsDouble(feature, i) elif fieldtype is text_type: try: val = ogrext1.OGR_F_GetFieldAsString(feature, i) val = val.decode(encoding) except UnicodeError: log.error("Failed to decode property '%s' value '%s'", key, val) raise # Does the text contain a JSON object? Let's check. # Let's check as cheaply as we can. if driver == 'GeoJSON' and val.startswith('{'): try: val = json.loads(val) except ValueError as err: log.warning(str(err)) # Now add to the properties object. 
props[key] = val elif fieldtype in (FionaDateType, FionaTimeType, FionaDateTimeType): retval = ogrext1.OGR_F_GetFieldAsDateTime( feature, i, &y, &m, &d, &hh, &mm, &ss, &tz) if fieldtype is FionaDateType: props[key] = datetime.date(y, m, d).isoformat() elif fieldtype is FionaTimeType: props[key] = datetime.time(hh, mm, ss).isoformat() else: props[key] = datetime.datetime( y, m, d, hh, mm, ss).isoformat() else: log.debug("%s: None, fieldtype: %r, %r" % (key, fieldtype, fieldtype in string_types)) props[key] = None cdef void *cogr_geometry = ogrext1.OGR_F_GetGeometryRef(feature) if cogr_geometry != NULL: geom = GeomBuilder().build(cogr_geometry) else: geom = None return { 'type': 'Feature', 'id': str(ogrext1.OGR_F_GetFID(feature)), 'geometry': geom, 'properties': props } cdef class OGRFeatureBuilder: """Builds an OGR Feature from a Fiona feature mapping. Allocates one OGR Feature which should be destroyed by the caller. Borrows a layer definition from the collection. """ cdef void * build(self, feature, collection) except NULL: cdef void *cogr_geometry = NULL cdef char *string_c cdef WritingSession session session = collection.session cdef void *cogr_layer = session.cogr_layer if cogr_layer == NULL: raise ValueError("Null layer") cdef void *cogr_featuredefn = ogrext1.OGR_L_GetLayerDefn(cogr_layer) if cogr_featuredefn == NULL: raise ValueError("Null feature definition") cdef void *cogr_feature = ogrext1.OGR_F_Create(cogr_featuredefn) if cogr_feature == NULL: raise ValueError("Null feature") if feature['geometry'] is not None: cogr_geometry = OGRGeomBuilder().build( feature['geometry']) ogrext1.OGR_F_SetGeometryDirectly(cogr_feature, cogr_geometry) # OGR_F_SetFieldString takes UTF-8 encoded strings ('bytes' in # Python 3). 
encoding = session.get_internalencoding() for key, value in feature['properties'].items(): log.debug( "Looking up %s in %s", key, repr(session._schema_mapping)) ogr_key = session._schema_mapping[key] schema_type = collection.schema['properties'][key] # Catch and re-raise unicode encoding errors. try: key_bytes = ogr_key.encode(encoding) except UnicodeError: log.error("Failed to encode property '%s' value '%s'", key, value) raise key_c = key_bytes i = ogrext1.OGR_F_GetFieldIndex(cogr_feature, key_c) if i < 0: continue # Special case: serialize dicts to assist OGR. if isinstance(value, dict): value = json.dumps(value) # Continue over the standard OGR types. if isinstance(value, integer_types): ogrext1.OGR_F_SetFieldInteger(cogr_feature, i, value) elif isinstance(value, float): ogrext1.OGR_F_SetFieldDouble(cogr_feature, i, value) elif (isinstance(value, string_types) and schema_type in ['date', 'time', 'datetime']): if schema_type == 'date': y, m, d, hh, mm, ss, ff = parse_date(value) elif schema_type == 'time': y, m, d, hh, mm, ss, ff = parse_time(value) else: y, m, d, hh, mm, ss, ff = parse_datetime(value) ogrext1.OGR_F_SetFieldDateTime( cogr_feature, i, y, m, d, hh, mm, ss, 0) elif (isinstance(value, datetime.date) and schema_type == 'date'): y, m, d = value.year, value.month, value.day ogrext1.OGR_F_SetFieldDateTime( cogr_feature, i, y, m, d, 0, 0, 0, 0) elif (isinstance(value, datetime.datetime) and schema_type == 'datetime'): y, m, d = value.year, value.month, value.day hh, mm, ss = value.hour, value.minute, value.second ogrext1.OGR_F_SetFieldDateTime( cogr_feature, i, y, m, d, hh, mm, ss, 0) elif (isinstance(value, datetime.time) and schema_type == 'time'): hh, mm, ss = value.hour, value.minute, value.second ogrext1.OGR_F_SetFieldDateTime( cogr_feature, i, 0, 0, 0, hh, mm, ss, 0) elif isinstance(value, string_types): # Catch, log, and re-raise string field value encoding errors. 
try: value_bytes = value.encode(encoding) except UnicodeError: log.error("Failed to encode property '%s' value '%s'", key, value) raise string_c = value_bytes ogrext1.OGR_F_SetFieldString(cogr_feature, i, string_c) elif value is None: pass # keep field unset/null else: raise ValueError("Invalid field type %s" % type(value)) log.debug("Set field %s: %s" % (key, value)) return cogr_feature cdef _deleteOgrFeature(void *cogr_feature): """Delete an OGR feature""" if cogr_feature != NULL: ogrext1.OGR_F_Destroy(cogr_feature) cogr_feature = NULL def featureRT(feature, collection): # For testing purposes only, leaks the JSON data cdef void *cogr_feature = OGRFeatureBuilder().build(feature, collection) cdef void *cogr_geometry = ogrext1.OGR_F_GetGeometryRef(cogr_feature) if cogr_geometry == NULL: raise ValueError("Null geometry") log.debug("Geometry: %s" % ogrext1.OGR_G_ExportToJson(cogr_geometry)) encoding = collection.encoding or 'utf-8' result = FeatureBuilder().build( cogr_feature, bbox=False, encoding=encoding, driver=collection.driver ) _deleteOgrFeature(cogr_feature) return result # Collection-related extension classes and functions cdef class Session: cdef void *cogr_ds cdef void *cogr_layer cdef object _fileencoding cdef object _encoding cdef object collection def __init__(self): self.cogr_ds = NULL self.cogr_layer = NULL self._fileencoding = None self._encoding = None def __dealloc__(self): self.stop() def start(self, collection): cdef const char *path_c = NULL cdef const char *name_c = NULL cdef void *drv = NULL cdef void *ds = NULL if collection.path == '-': path = '/vsistdin/' else: path = collection.path try: path_b = path.encode('utf-8') except UnicodeError: # Presume already a UTF-8 encoded string path_b = path path_c = path_b with cpl_errs: drivers = [] if collection._driver: drivers = [collection._driver] elif collection.enabled_drivers: drivers = collection.enabled_drivers if drivers: for name in drivers: name_b = name.encode() name_c = name_b 
log.debug("Trying driver: %s", name) drv = ogrext1.OGRGetDriverByName(name_c) if drv != NULL: ds = ogrext1.OGR_Dr_Open(drv, path_c, 0) if ds != NULL: self.cogr_ds = ds collection._driver = name break else: self.cogr_ds = ogrext1.OGROpen(path_c, 0, NULL) if self.cogr_ds == NULL: raise FionaValueError( "No dataset found at path '%s' using drivers: %s" % ( collection.path, drivers or '*')) if isinstance(collection.name, string_types): name_b = collection.name.encode('utf-8') name_c = name_b self.cogr_layer = ogrext1.OGR_DS_GetLayerByName( self.cogr_ds, name_c) elif isinstance(collection.name, int): self.cogr_layer = ogrext1.OGR_DS_GetLayer( self.cogr_ds, collection.name) name_c = ogrext1.OGR_L_GetName(self.cogr_layer) name_b = name_c collection.name = name_b.decode('utf-8') if self.cogr_layer == NULL: raise ValueError("Null layer: " + repr(collection.name)) self.collection = collection userencoding = self.collection.encoding if userencoding: ogrext1.CPLSetThreadLocalConfigOption('SHAPE_ENCODING', '') self._fileencoding = userencoding.upper() else: self._fileencoding = ( ogrext1.OGR_L_TestCapability( self.cogr_layer, OLC_STRINGSASUTF8) and 'utf-8') or ( self.get_driver() == "ESRI Shapefile" and 'ISO-8859-1') or locale.getpreferredencoding().upper() def stop(self): self.cogr_layer = NULL if self.cogr_ds != NULL: ogrext1.OGR_DS_Destroy(self.cogr_ds) self.cogr_ds = NULL def get_fileencoding(self): return self._fileencoding def get_internalencoding(self): if not self._encoding: fileencoding = self.get_fileencoding() self._encoding = ( ogrext1.OGR_L_TestCapability( self.cogr_layer, OLC_STRINGSASUTF8) and 'utf-8') or fileencoding return self._encoding def get_length(self): if self.cogr_layer == NULL: raise ValueError("Null layer") return ogrext1.OGR_L_GetFeatureCount(self.cogr_layer, 0) def get_driver(self): cdef void *cogr_driver = ogrext1.OGR_DS_GetDriver(self.cogr_ds) if cogr_driver == NULL: raise ValueError("Null driver") cdef char *name = 
ogrext1.OGR_Dr_GetName(cogr_driver) driver_name = name return driver_name.decode() def get_schema(self): cdef int i cdef int n cdef void *cogr_featuredefn cdef void *cogr_fielddefn cdef char *key_c props = [] if self.cogr_layer == NULL: raise ValueError("Null layer") cogr_featuredefn = ogrext1.OGR_L_GetLayerDefn(self.cogr_layer) if cogr_featuredefn == NULL: raise ValueError("Null feature definition") n = ogrext1.OGR_FD_GetFieldCount(cogr_featuredefn) for i from 0 <= i < n: cogr_fielddefn = ogrext1.OGR_FD_GetFieldDefn(cogr_featuredefn, i) if cogr_fielddefn == NULL: raise ValueError("Null field definition") key_c = ogrext1.OGR_Fld_GetNameRef(cogr_fielddefn) key_b = key_c if not bool(key_b): raise ValueError("Invalid field name ref: %s" % key) key = key_b.decode(self.get_internalencoding()) fieldtypename = FIELD_TYPES[ogrext1.OGR_Fld_GetType(cogr_fielddefn)] if not fieldtypename: log.warning( "Skipping field %s: invalid type %s", key, ogrext1.OGR_Fld_GetType(cogr_fielddefn)) continue val = fieldtypename if fieldtypename == 'float': fmt = "" width = ogrext1.OGR_Fld_GetWidth(cogr_fielddefn) if width: # and width != 24: fmt = ":%d" % width precision = ogrext1.OGR_Fld_GetPrecision(cogr_fielddefn) if precision: # and precision != 15: fmt += ".%d" % precision val = "float" + fmt elif fieldtypename == 'int': fmt = "" width = ogrext1.OGR_Fld_GetWidth(cogr_fielddefn) if width: # and width != 11: fmt = ":%d" % width val = fieldtypename + fmt elif fieldtypename == 'str': fmt = "" width = ogrext1.OGR_Fld_GetWidth(cogr_fielddefn) if width: # and width != 80: fmt = ":%d" % width val = fieldtypename + fmt props.append((key, val)) code = normalize_geometry_type_code( ogrext1.OGR_FD_GetGeomType(cogr_featuredefn)) return { 'properties': OrderedDict(props), 'geometry': GEOMETRY_TYPES[code]} def get_crs(self): cdef char *proj_c = NULL cdef char *auth_key = NULL cdef char *auth_val = NULL cdef void *cogr_crs = NULL if self.cogr_layer == NULL: raise ValueError("Null layer") cogr_crs = 
ogrext1.OGR_L_GetSpatialRef(self.cogr_layer) crs = {} if cogr_crs != NULL: log.debug("Got coordinate system") retval = ogrext1.OSRAutoIdentifyEPSG(cogr_crs) if retval > 0: log.info("Failed to auto identify EPSG: %d", retval) auth_key = ogrext1.OSRGetAuthorityName(cogr_crs, NULL) auth_val = ogrext1.OSRGetAuthorityCode(cogr_crs, NULL) if auth_key != NULL and auth_val != NULL: key_b = auth_key key = key_b.decode('utf-8') if key == 'EPSG': val_b = auth_val val = val_b.decode('utf-8') crs['init'] = "epsg:" + val else: ogrext1.OSRExportToProj4(cogr_crs, &proj_c) if proj_c == NULL: raise ValueError("Null projection") proj_b = proj_c log.debug("Params: %s", proj_b) value = proj_b.decode() value = value.strip() for param in value.split(): kv = param.split("=") if len(kv) == 2: k, v = kv try: v = float(v) if v % 1 == 0: v = int(v) except ValueError: # Leave v as a string pass elif len(kv) == 1: k, v = kv[0], True else: raise ValueError("Unexpected proj parameter %s" % param) k = k.lstrip("+") crs[k] = v ogrext1.CPLFree(proj_c) else: log.debug("Projection not found (cogr_crs was NULL)") return crs def get_crs_wkt(self): cdef char *proj_c = NULL if self.cogr_layer == NULL: raise ValueError("Null layer") cogr_crs = ogrext1.OGR_L_GetSpatialRef(self.cogr_layer) crs_wkt = "" if cogr_crs != NULL: log.debug("Got coordinate system") ogrext1.OSRExportToWkt(cogr_crs, &proj_c) if proj_c == NULL: raise ValueError("Null projection") proj_b = proj_c crs_wkt = proj_b.decode('utf-8') ogrext1.CPLFree(proj_c) else: log.debug("Projection not found (cogr_crs was NULL)") return crs_wkt def get_extent(self): if self.cogr_layer == NULL: raise ValueError("Null layer") cdef ogrext1.OGREnvelope extent result = ogrext1.OGR_L_GetExtent(self.cogr_layer, &extent, 1) return (extent.MinX, extent.MinY, extent.MaxX, extent.MaxY) def has_feature(self, fid): """Provides access to feature data by FID. Supports Collection.__contains__(). 
""" cdef void * cogr_feature fid = int(fid) cogr_feature = ogrext1.OGR_L_GetFeature(self.cogr_layer, fid) if cogr_feature != NULL: _deleteOgrFeature(cogr_feature) return True else: return False def get_feature(self, fid): """Provides access to feature data by FID. Supports Collection.__contains__(). """ cdef void * cogr_feature fid = int(fid) cogr_feature = ogrext1.OGR_L_GetFeature(self.cogr_layer, fid) if cogr_feature != NULL: _deleteOgrFeature(cogr_feature) return True else: return False def __getitem__(self, item): cdef void * cogr_feature if isinstance(item, slice): itr = Iterator(self.collection, item.start, item.stop, item.step) log.debug("Slice: %r", item) return list(itr) elif isinstance(item, int): index = item # from the back if index < 0: ftcount = ogrext1.OGR_L_GetFeatureCount(self.cogr_layer, 0) if ftcount == -1: raise IndexError( "collection's dataset does not support negative indexes") index += ftcount cogr_feature = ogrext1.OGR_L_GetFeature(self.cogr_layer, index) if cogr_feature == NULL: return None feature = FeatureBuilder().build( cogr_feature, bbox=False, encoding=self.get_internalencoding(), driver=self.collection.driver ) _deleteOgrFeature(cogr_feature) return feature def isactive(self): if self.cogr_layer != NULL and self.cogr_ds != NULL: return 1 else: return 0 cdef class WritingSession(Session): cdef object _schema_mapping def start(self, collection): cdef void *cogr_fielddefn cdef void *cogr_driver cdef void *cogr_ds = NULL cdef void *cogr_layer = NULL cdef void *cogr_srs = NULL cdef char **options = NULL self.collection = collection cdef char *path_c cdef char *driver_c cdef char *name_c cdef char *proj_c cdef char *fileencoding_c path = collection.path if collection.mode == 'a': if os.path.exists(path): try: path_b = path.encode('utf-8') except UnicodeError: path_b = path path_c = path_b with cpl_errs: self.cogr_ds = ogrext1.OGROpen(path_c, 1, NULL) if self.cogr_ds == NULL: raise RuntimeError("Failed to open %s" % path) cogr_driver = 
ogrext1.OGR_DS_GetDriver(self.cogr_ds) if cogr_driver == NULL: raise ValueError("Null driver") if isinstance(collection.name, string_types): name_b = collection.name.encode() name_c = name_b self.cogr_layer = ogrext1.OGR_DS_GetLayerByName( self.cogr_ds, name_c) elif isinstance(collection.name, int): self.cogr_layer = ogrext1.OGR_DS_GetLayer( self.cogr_ds, collection.name) if self.cogr_layer == NULL: raise RuntimeError( "Failed to get layer %s" % collection.name) else: raise OSError("No such file or directory %s" % path) userencoding = self.collection.encoding self._fileencoding = (userencoding or ( ogrext1.OGR_L_TestCapability(self.cogr_layer, OLC_STRINGSASUTF8) and 'utf-8') or ( self.get_driver() == "ESRI Shapefile" and 'ISO-8859-1') or locale.getpreferredencoding()).upper() elif collection.mode == 'w': try: path_b = path.encode('utf-8') except UnicodeError: path_b = path path_c = path_b driver_b = collection.driver.encode() driver_c = driver_b # TODO: use exc_wrap_pointer() cogr_driver = ogrext1.OGRGetDriverByName(driver_c) if cogr_driver == NULL: raise ValueError("Null driver") # Our most common use case is the creation of a new data # file and historically we've assumed that it's a file on # the local filesystem and queryable via os.path. # # TODO: remove the assumption. # TODO: use exc_wrap_pointer(). 
if not os.path.exists(path): cogr_ds = ogrext1.OGR_Dr_CreateDataSource( cogr_driver, path_c, NULL) else: cogr_ds = ogrext1.OGROpen(path_c, 1, NULL) if cogr_ds == NULL: try: cogr_ds = exc_wrap_pointer( ogrext1.OGR_Dr_CreateDataSource( cogr_driver, path_c, NULL)) except Exception as exc: raise DriverIOError(str(exc)) elif collection.name is None: ogrext1.OGR_DS_Destroy(cogr_ds) cogr_ds = NULL log.debug("Deleted pre-existing data at %s", path) cogr_ds = ogrext1.OGR_Dr_CreateDataSource( cogr_driver, path_c, NULL) else: pass if cogr_ds == NULL: raise RuntimeError("Failed to open %s" % path) else: self.cogr_ds = cogr_ds # Set the spatial reference system from the crs given to the # collection constructor. We by-pass the crs_wkt and crs # properties because they aren't accessible until the layer # is constructed (later). col_crs = collection._crs_wkt or collection._crs if col_crs: cogr_srs = ogrext1.OSRNewSpatialReference(NULL) if cogr_srs == NULL: raise ValueError("NULL spatial reference") # First, check for CRS strings like "EPSG:3857". if isinstance(col_crs, string_types): proj_b = col_crs.encode('utf-8') proj_c = proj_b ogrext1.OSRSetFromUserInput(cogr_srs, proj_c) elif isinstance(col_crs, compat.DICT_TYPES): # EPSG is a special case. init = col_crs.get('init') if init: log.debug("Init: %s", init) auth, val = init.split(':') if auth.upper() == 'EPSG': log.debug("Setting EPSG: %s", val) ogrext1.OSRImportFromEPSG(cogr_srs, int(val)) else: params = [] col_crs['wktext'] = True for k, v in col_crs.items(): if v is True or (k in ('no_defs', 'wktext') and v): params.append("+%s" % k) else: params.append("+%s=%s" % (k, v)) proj = " ".join(params) log.debug("PROJ.4 to be imported: %r", proj) proj_b = proj.encode('utf-8') proj_c = proj_b ogrext1.OSRImportFromProj4(cogr_srs, proj_c) else: raise ValueError("Invalid CRS") # Fixup, export to WKT, and set the GDAL dataset's projection. ogrext1.OSRFixup(cogr_srs) # Figure out what encoding to use. 
The encoding parameter given # to the collection constructor takes highest precedence, then # 'iso-8859-1', then the system's default encoding as last resort. sysencoding = locale.getpreferredencoding() userencoding = collection.encoding self._fileencoding = (userencoding or ( collection.driver == "ESRI Shapefile" and 'ISO-8859-1') or sysencoding).upper() fileencoding = self.get_fileencoding() if fileencoding: fileencoding_b = fileencoding.encode('utf-8') fileencoding_c = fileencoding_b options = ogrext1.CSLSetNameValue(options, "ENCODING", fileencoding_c) log.debug("Output file encoding: %s", fileencoding) # Does the layer exist already? If so, we delete it. layer_count = ogrext1.OGR_DS_GetLayerCount(self.cogr_ds) layer_names = [] for i in range(layer_count): cogr_layer = ogrext1.OGR_DS_GetLayer(cogr_ds, i) name_c = ogrext1.OGR_L_GetName(cogr_layer) name_b = name_c layer_names.append(name_b.decode('utf-8')) idx = -1 if isinstance(collection.name, string_types): if collection.name in layer_names: idx = layer_names.index(collection.name) elif isinstance(collection.name, int): if collection.name >= 0 and collection.name < layer_count: idx = collection.name if idx >= 0: log.debug("Deleted pre-existing layer at %s", collection.name) ogrext1.OGR_DS_DeleteLayer(self.cogr_ds, idx) # Create the named layer in the datasource. name_b = collection.name.encode('utf-8') name_c = name_b try: self.cogr_layer = exc_wrap_pointer( ogrext1.OGR_DS_CreateLayer( self.cogr_ds, name_c, cogr_srs, geometry_type_code( collection.schema.get('geometry', 'Unknown')), options)) except Exception as exc: raise DriverError(str(exc)) finally: # Shapefile layers make a copy of the passed srs. GPKG # layers, on the other hand, increment its reference # count. OSRRelease() is the safe way to release # OGRSpatialReferenceH. 
if cogr_srs != NULL: ogrext1.OSRRelease(cogr_srs) if options != NULL: ogrext1.CSLDestroy(options) log.debug("Created layer") # Next, make a layer definition from the given schema properties, # which are an ordered dict since Fiona 1.0.1. for key, value in collection.schema['properties'].items(): log.debug("Creating field: %s %s", key, value) # Convert 'long' to 'int'. See # https://github.com/Toblerity/Fiona/issues/101. if value == 'long': value = 'int' # Is there a field width/precision? width = precision = None if ':' in value: value, fmt = value.split(':') if '.' in fmt: width, precision = map(int, fmt.split('.')) else: width = int(fmt) encoding = self.get_internalencoding() key_bytes = key.encode(encoding) cogr_fielddefn = ogrext1.OGR_Fld_Create( key_bytes, FIELD_TYPES.index(value) ) if cogr_fielddefn == NULL: raise ValueError("Null field definition") if width: ogrext1.OGR_Fld_SetWidth(cogr_fielddefn, width) if precision: ogrext1.OGR_Fld_SetPrecision(cogr_fielddefn, precision) ogrext1.OGR_L_CreateField(self.cogr_layer, cogr_fielddefn, 1) ogrext1.OGR_Fld_Destroy(cogr_fielddefn) log.debug("Created fields") # Mapping of the Python collection schema to the munged # OGR schema. 
ogr_schema = self.get_schema() self._schema_mapping = dict(zip( collection.schema['properties'].keys(), ogr_schema['properties'].keys() )) log.debug("Writing started") def writerecs(self, records, collection): """Writes buffered records to OGR.""" cdef void *cogr_driver cdef void *cogr_feature cdef void *cogr_layer = self.cogr_layer if cogr_layer == NULL: raise ValueError("Null layer") schema_geom_type = collection.schema['geometry'] cogr_driver = ogrext1.OGR_DS_GetDriver(self.cogr_ds) if ogrext1.OGR_Dr_GetName(cogr_driver) == b"GeoJSON": def validate_geometry_type(rec): return True elif ogrext1.OGR_Dr_GetName(cogr_driver) == b"ESRI Shapefile" \ and "Point" not in collection.schema['geometry']: schema_geom_type = collection.schema['geometry'].lstrip( "3D ").lstrip("Multi") def validate_geometry_type(rec): return rec['geometry'] is None or \ rec['geometry']['type'].lstrip( "3D ").lstrip("Multi") == schema_geom_type else: schema_geom_type = collection.schema['geometry'].lstrip("3D ") def validate_geometry_type(rec): return rec['geometry'] is None or \ rec['geometry']['type'].lstrip("3D ") == schema_geom_type schema_props_keys = set(collection.schema['properties'].keys()) for record in records: log.debug("Creating feature in layer: %s" % record) # Validate against collection's schema. 
if set(record['properties'].keys()) != schema_props_keys: raise ValueError( "Record does not match collection schema: %r != %r" % ( record['properties'].keys(), list(schema_props_keys) )) if not validate_geometry_type(record): raise ValueError( "Record's geometry type does not match " "collection schema's geometry type: %r != %r" % ( record['geometry']['type'], collection.schema['geometry'] )) cogr_feature = OGRFeatureBuilder().build(record, collection) result = ogrext1.OGR_L_CreateFeature(cogr_layer, cogr_feature) if result != OGRERR_NONE: raise RuntimeError("Failed to write record: %s" % record) _deleteOgrFeature(cogr_feature) def sync(self, collection): """Syncs OGR to disk.""" cdef void *cogr_ds = self.cogr_ds cdef void *cogr_layer = self.cogr_layer if cogr_ds == NULL: raise ValueError("Null data source") log.debug("Syncing OGR to disk") retval = ogrext1.OGR_DS_SyncToDisk(cogr_ds) if retval != OGRERR_NONE: raise RuntimeError("Failed to sync to disk") cdef class Iterator: """Provides iterated access to feature data. 
""" # Reference to its Collection cdef collection cdef encoding cdef int next_index cdef stop cdef start cdef step cdef fastindex cdef stepsign def __cinit__(self, collection, start=None, stop=None, step=None, bbox=None, mask=None): if collection.session is None: raise ValueError("I/O operation on closed collection") self.collection = collection cdef Session session cdef void *cogr_geometry session = self.collection.session cdef void *cogr_layer = session.cogr_layer if cogr_layer == NULL: raise ValueError("Null layer") ogrext1.OGR_L_ResetReading(cogr_layer) if bbox and mask: raise ValueError("mask and bbox can not be set together") if bbox: ogrext1.OGR_L_SetSpatialFilterRect( cogr_layer, bbox[0], bbox[1], bbox[2], bbox[3]) elif mask: cogr_geometry = OGRGeomBuilder().build(mask) ogrext1.OGR_L_SetSpatialFilter(cogr_layer, cogr_geometry) ogrext1.OGR_G_DestroyGeometry(cogr_geometry) else: ogrext1.OGR_L_SetSpatialFilter( cogr_layer, NULL) self.encoding = session.get_internalencoding() self.fastindex = ogrext1.OGR_L_TestCapability( session.cogr_layer, OLC_FASTSETNEXTBYINDEX) ftcount = ogrext1.OGR_L_GetFeatureCount(session.cogr_layer, 0) if ftcount == -1 and ((start is not None and start < 0) or (stop is not None and stop < 0)): raise IndexError( "collection's dataset does not support negative slice indexes") if stop is not None and stop < 0: stop += ftcount if start is None: start = 0 if start is not None and start < 0: start += ftcount # step size if step is None: step = 1 if step == 0: raise ValueError("slice step cannot be zero") if step < 0 and not self.fastindex: warnings.warn("Layer does not support" \ "OLCFastSetNextByIndex, negative step size may" \ " be slow", RuntimeWarning) self.stepsign = int(math.copysign(1, step)) self.stop = stop self.start = start self.step = step self.next_index = start log.debug("Index: %d", self.next_index) ogrext1.OGR_L_SetNextByIndex(session.cogr_layer, self.next_index) def __iter__(self): return self def _next(self): """Internal 
method to set read cursor to next item""" cdef Session session session = self.collection.session # Check if next_index is valid if self.next_index < 0: raise StopIteration if self.stepsign == 1: if self.next_index < self.start or (self.stop is not None and self.next_index >= self.stop): raise StopIteration else: if self.next_index > self.start or (self.stop is not None and self.next_index <= self.stop): raise StopIteration # Set read cursor to next_item position if self.step > 1 and self.fastindex: ogrext1.OGR_L_SetNextByIndex(session.cogr_layer, self.next_index) elif self.step > 1 and not self.fastindex and not self.next_index == self.start: for _ in range(self.step - 1): # TODO rbuffat add test -> OGR_L_GetNextFeature increments cursor by 1, therefore self.step - 1 as one increment was performed when feature is read cogr_feature = ogrext1.OGR_L_GetNextFeature(session.cogr_layer) if cogr_feature == NULL: raise StopIteration elif self.step > 1 and not self.fastindex and self.next_index == self.start: ogrext1.OGR_L_SetNextByIndex(session.cogr_layer, self.next_index) elif self.step == 0: # ogrext1.OGR_L_GetNextFeature increments read cursor by one pass elif self.step < 0: ogrext1.OGR_L_SetNextByIndex(session.cogr_layer, self.next_index) # set the next index self.next_index += self.step def __next__(self): cdef void * cogr_feature cdef Session session session = self.collection.session #Update read cursor self._next() # Get the next feature. cogr_feature = ogrext1.OGR_L_GetNextFeature(session.cogr_layer) if cogr_feature == NULL: raise StopIteration feature = FeatureBuilder().build( cogr_feature, bbox=False, encoding=self.encoding, driver=self.collection.driver ) _deleteOgrFeature(cogr_feature) return feature cdef class ItemsIterator(Iterator): def __next__(self): cdef long fid cdef void * cogr_feature cdef Session session session = self.collection.session #Update read cursor self._next() # Get the next feature. 
cogr_feature = ogrext1.OGR_L_GetNextFeature(session.cogr_layer) if cogr_feature == NULL: raise StopIteration fid = ogrext1.OGR_F_GetFID(cogr_feature) feature = FeatureBuilder().build( cogr_feature, bbox=False, encoding=self.encoding, driver=self.collection.driver ) _deleteOgrFeature(cogr_feature) return fid, feature cdef class KeysIterator(Iterator): def __next__(self): cdef long fid cdef void * cogr_feature cdef Session session session = self.collection.session #Update read cursor self._next() # Get the next feature. cogr_feature = ogrext1.OGR_L_GetNextFeature(session.cogr_layer) if cogr_feature == NULL: raise StopIteration fid = ogrext1.OGR_F_GetFID(cogr_feature) _deleteOgrFeature(cogr_feature) return fid def _remove(path, driver=None): """Deletes an OGR data source """ cdef void *cogr_driver cdef int result if driver is None: driver = 'ESRI Shapefile' cogr_driver = ogrext1.OGRGetDriverByName(driver.encode('utf-8')) if cogr_driver == NULL: raise ValueError("Null driver") if not ogrext1.OGR_Dr_TestCapability(cogr_driver, ODrCDeleteDataSource): raise RuntimeError("Driver does not support dataset removal operation") result = ogrext1.OGR_Dr_DeleteDataSource(cogr_driver, path.encode('utf-8')) if result != OGRERR_NONE: raise RuntimeError("Failed to remove data source {}".format(path)) def _listlayers(path): """Provides a list of the layers in an OGR data source. """ cdef void *cogr_ds cdef void *cogr_layer cdef char *path_c cdef char *name_c # Open OGR data source. try: path_b = path.encode('utf-8') except UnicodeError: path_b = path path_c = path_b with cpl_errs: cogr_ds = ogrext1.OGROpen(path_c, 0, NULL) if cogr_ds == NULL: raise ValueError("No data available at path '%s'" % path) # Loop over the layers to get their names. 
layer_count = ogrext1.OGR_DS_GetLayerCount(cogr_ds) layer_names = [] for i in range(layer_count): cogr_layer = ogrext1.OGR_DS_GetLayer(cogr_ds, i) name_c = ogrext1.OGR_L_GetName(cogr_layer) name_b = name_c layer_names.append(name_b.decode('utf-8')) # Close up data source. if cogr_ds != NULL: ogrext1.OGR_DS_Destroy(cogr_ds) cogr_ds = NULL return layer_names def buffer_to_virtual_file(bytesbuf, ext=''): """Maps a bytes buffer to a virtual file. `ext` is empty or begins with a period and contains at most one period. """ vsi_filename = os.path.join('/vsimem', uuid.uuid4().hex + ext) vsi_cfilename = vsi_filename if not isinstance(vsi_filename, string_types) else vsi_filename.encode('utf-8') vsi_handle = ogrext1.VSIFileFromMemBuffer(vsi_cfilename, bytesbuf, len(bytesbuf), 0) if vsi_handle == NULL: raise OSError('failed to map buffer to file') if ogrext1.VSIFCloseL(vsi_handle) != 0: raise OSError('failed to close mapped file handle') return vsi_filename def remove_virtual_file(vsi_filename): vsi_cfilename = vsi_filename if not isinstance(vsi_filename, string_types) else vsi_filename.encode('utf-8') return ogrext1.VSIUnlink(vsi_cfilename) Fiona-1.7.10/fiona/ogrext2.pxd000066400000000000000000000207151317446052300160510ustar00rootroot00000000000000# Copyright (c) 2007, Sean C. Gillies # All rights reserved. 
# See ../LICENSE.txt cdef extern from "gdal.h": char * GDALVersionInfo (char *pszRequest) void * GDALGetDriverByName(const char * pszName) void * GDALOpenEx(const char * pszFilename, unsigned int nOpenFlags, const char ** papszAllowedDrivers, const char ** papszOpenOptions, const char *const *papszSibling1Files ) int GDAL_OF_UPDATE int GDAL_OF_READONLY int GDAL_OF_VECTOR int GDAL_OF_VERBOSE_ERROR int GDALDatasetGetLayerCount(void * hds) void * GDALDatasetGetLayer(void * hDS, int iLayer) void * GDALDatasetGetLayerByName(void * hDS, char * pszName) void GDALClose(void * hDS) void * GDALCreate(void * hDriver, const char * pszFilename, int nXSize, int nYSize, int nBands, GDALDataType eBandType, char ** papszOptions) void * GDALDatasetCreateLayer(void * hDS, const char * pszName, void * hSpatialRef, int eType, char ** papszOptions) int GDALDatasetDeleteLayer(void * hDS, int iLayer) void GDALFlushCache(void * hDS) char * GDALGetDriverShortName(void * hDriver) char * GDALGetDatasetDriver (void * hDataset) int GDALDeleteDataset(void * hDriver, const char * pszFilename) ctypedef enum GDALDataType: GDT_Unknown GDT_Byte GDT_UInt16 GDT_Int16 GDT_UInt32 GDT_Int32 GDT_Float32 GDT_Float64 GDT_CInt16 GDT_CInt32 GDT_CFloat32 GDT_CFloat64 GDT_TypeCount cdef extern from "gdal_version.h": int GDAL_COMPUTE_VERSION(int maj, int min, int rev) cdef extern from "cpl_conv.h": void * CPLMalloc (size_t) void CPLFree (void *ptr) void CPLSetThreadLocalConfigOption (char *key, char *val) const char *CPLGetConfigOption (char *, char *) cdef extern from "cpl_string.h": char ** CSLSetNameValue (char **list, const char *name, const char *value) void CSLDestroy (char **list) char ** CSLAddString(char **list, const char *string) cdef extern from "cpl_vsi.h": ctypedef struct VSILFILE: pass int VSIFCloseL (VSILFILE *) VSILFILE * VSIFileFromMemBuffer (const char * filename, unsigned char * data, int data_len, int take_ownership) int VSIUnlink (const char * pathname) cdef extern from "ogr_core.h": 
ctypedef int OGRErr ctypedef struct OGREnvelope: double MinX double MaxX double MinY double MaxY char * OGRGeometryTypeToName(int) cdef extern from "ogr_srs_api.h": ctypedef void * OGRSpatialReferenceH void OSRCleanup () OGRSpatialReferenceH OSRClone (OGRSpatialReferenceH srs) int OSRExportToProj4 (OGRSpatialReferenceH srs, char **params) int OSRExportToWkt (OGRSpatialReferenceH srs, char **params) int OSRImportFromEPSG (OGRSpatialReferenceH, int code) int OSRImportFromProj4 (OGRSpatialReferenceH srs, const char *proj) int OSRSetFromUserInput (OGRSpatialReferenceH srs, const char *input) int OSRAutoIdentifyEPSG (OGRSpatialReferenceH srs) int OSRFixup(OGRSpatialReferenceH srs) const char * OSRGetAuthorityName (OGRSpatialReferenceH srs, const char *key) const char * OSRGetAuthorityCode (OGRSpatialReferenceH srs, const char *key) OGRSpatialReferenceH OSRNewSpatialReference (char *wkt) void OSRRelease (OGRSpatialReferenceH srs) void * OCTNewCoordinateTransformation (OGRSpatialReferenceH source, OGRSpatialReferenceH dest) void OCTDestroyCoordinateTransformation (void *source) int OCTTransform (void *ct, int nCount, double *x, double *y, double *z) cdef extern from "ogr_api.h": char * OGR_Dr_GetName (void *driver) void * OGR_Dr_CreateDataSource (void *driver, const char *path, char **options) int OGR_Dr_DeleteDataSource (void *driver, char *) void * OGR_Dr_Open (void *driver, const char *path, int bupdate) int OGR_Dr_TestCapability (void *driver, const char *) void * OGR_F_Create (void *featuredefn) void OGR_F_Destroy (void *feature) long OGR_F_GetFID (void *feature) int OGR_F_IsFieldSet (void *feature, int n) int OGR_F_GetFieldAsDateTime (void *feature, int n, int *y, int *m, int *d, int *h, int *m, int *s, int *z) double OGR_F_GetFieldAsDouble (void *feature, int n) int OGR_F_GetFieldAsInteger (void *feature, int n) char * OGR_F_GetFieldAsString (void *feature, int n) int OGR_F_GetFieldCount (void *feature) void * OGR_F_GetFieldDefnRef (void *feature, int n) int 
OGR_F_GetFieldIndex (void *feature, char *name) void * OGR_F_GetGeometryRef (void *feature) void OGR_F_SetFieldDateTime (void *feature, int n, int y, int m, int d, int hh, int mm, int ss, int tz) void OGR_F_SetFieldDouble (void *feature, int n, double value) void OGR_F_SetFieldInteger (void *feature, int n, int value) void OGR_F_SetFieldString (void *feature, int n, char *value) int OGR_F_SetGeometryDirectly (void *feature, void *geometry) void * OGR_FD_Create (char *name) int OGR_FD_GetFieldCount (void *featuredefn) void * OGR_FD_GetFieldDefn (void *featuredefn, int n) int OGR_FD_GetGeomType (void *featuredefn) char * OGR_FD_GetName (void *featuredefn) void * OGR_Fld_Create (char *name, int fieldtype) void OGR_Fld_Destroy (void *fielddefn) char * OGR_Fld_GetNameRef (void *fielddefn) int OGR_Fld_GetPrecision (void *fielddefn) int OGR_Fld_GetType (void *fielddefn) int OGR_Fld_GetWidth (void *fielddefn) void OGR_Fld_Set (void *fielddefn, char *name, int fieldtype, int width, int precision, int justification) void OGR_Fld_SetPrecision (void *fielddefn, int n) void OGR_Fld_SetWidth (void *fielddefn, int n) OGRErr OGR_G_AddGeometryDirectly (void *geometry, void *part) void OGR_G_AddPoint (void *geometry, double x, double y, double z) void OGR_G_AddPoint_2D (void *geometry, double x, double y) void OGR_G_CloseRings (void *geometry) void * OGR_G_CreateGeometry (int wkbtypecode) void OGR_G_DestroyGeometry (void *geometry) unsigned char * OGR_G_ExportToJson (void *geometry) void OGR_G_ExportToWkb (void *geometry, int endianness, char *buffer) int OGR_G_GetCoordinateDimension (void *geometry) int OGR_G_GetGeometryCount (void *geometry) unsigned char * OGR_G_GetGeometryName (void *geometry) int OGR_G_GetGeometryType (void *geometry) void * OGR_G_GetGeometryRef (void *geometry, int n) int OGR_G_GetPointCount (void *geometry) double OGR_G_GetX (void *geometry, int n) double OGR_G_GetY (void *geometry, int n) double OGR_G_GetZ (void *geometry, int n) void OGR_G_ImportFromWkb 
(void *geometry, unsigned char *bytes, int nbytes) int OGR_G_WkbSize (void *geometry) OGRErr OGR_L_CreateFeature (void *layer, void *feature) int OGR_L_CreateField (void *layer, void *fielddefn, int flexible) OGRErr OGR_L_GetExtent (void *layer, void *extent, int force) void * OGR_L_GetFeature (void *layer, int n) int OGR_L_GetFeatureCount (void *layer, int m) void * OGR_L_GetLayerDefn (void *layer) char * OGR_L_GetName (void *layer) void * OGR_L_GetNextFeature (void *layer) void * OGR_L_GetSpatialFilter (void *layer) void * OGR_L_GetSpatialRef (void *layer) void OGR_L_ResetReading (void *layer) void OGR_L_SetSpatialFilter (void *layer, void *geometry) void OGR_L_SetSpatialFilterRect ( void *layer, double minx, double miny, double maxx, double maxy ) int OGR_L_TestCapability (void *layer, char *name) void * OGRGetDriverByName (char *) void * OGROpen (char *path, int mode, void *x) void * OGROpenShared (char *path, int mode, void *x) int OGRReleaseDataSource (void *datasource) OGRErr OGR_L_SetNextByIndex (void *layer, long nIndex) long long OGR_F_GetFieldAsInteger64 (void *feature, int n) void OGR_F_SetFieldInteger64 (void *feature, int n, long long value) Fiona-1.7.10/fiona/ogrext2.pyx000066400000000000000000001402051317446052300160730ustar00rootroot00000000000000# These are extension functions and classes using the OGR C API. 
from __future__ import absolute_import import datetime import json import locale import logging import os import warnings import math import uuid from six import integer_types, string_types, text_type from fiona cimport ogrext2 from fiona.ogrext2 cimport OGREnvelope from fiona._geometry cimport ( GeomBuilder, OGRGeomBuilder, geometry_type_code, normalize_geometry_type_code) from fiona._err cimport exc_wrap_pointer from fiona._err import cpl_errs from fiona._geometry import GEOMETRY_TYPES from fiona import compat from fiona.errors import ( DriverError, DriverIOError, SchemaError, CRSError, FionaValueError) from fiona.compat import OrderedDict from fiona.rfc3339 import parse_date, parse_datetime, parse_time from fiona.rfc3339 import FionaDateType, FionaDateTimeType, FionaTimeType from libc.stdlib cimport malloc, free from libc.string cimport strcmp log = logging.getLogger("Fiona") # Mapping of OGR integer field types to Fiona field type names. # # Lists are currently unsupported in this version, but might be done as # arrays in a future version. FIELD_TYPES = [ 'int', # OFTInteger, Simple 32bit integer None, # OFTIntegerList, List of 32bit integers 'float', # OFTReal, Double Precision floating point None, # OFTRealList, List of doubles 'str', # OFTString, String of ASCII chars None, # OFTStringList, Array of strings None, # OFTWideString, deprecated None, # OFTWideStringList, deprecated None, # OFTBinary, Raw Binary data 'date', # OFTDate, Date 'time', # OFTTime, Time 'datetime', # OFTDateTime, Date and Time 'int', # OFTInteger64, Single 64bit integer None, # OFTInteger64List, List of 64bit integers ] # Mapping of Fiona field type names to Python types. 
FIELD_TYPES_MAP = { 'int': int, 'float': float, 'str': text_type, 'date': FionaDateType, 'time': FionaTimeType, 'datetime': FionaDateTimeType } # OGR Driver capability cdef const char * ODrCCreateDataSource = "CreateDataSource" cdef const char * ODrCDeleteDataSource = "DeleteDataSource" # OGR Layer capability cdef const char * OLC_RANDOMREAD = "RandomRead" cdef const char * OLC_SEQUENTIALWRITE = "SequentialWrite" cdef const char * OLC_RANDOMWRITE = "RandomWrite" cdef const char * OLC_FASTSPATIALFILTER = "FastSpatialFilter" cdef const char * OLC_FASTFEATURECOUNT = "FastFeatureCount" cdef const char * OLC_FASTGETEXTENT = "FastGetExtent" cdef const char * OLC_FASTSETNEXTBYINDEX = "FastSetNextByIndex" cdef const char * OLC_CREATEFIELD = "CreateField" cdef const char * OLC_CREATEGEOMFIELD = "CreateGeomField" cdef const char * OLC_DELETEFIELD = "DeleteField" cdef const char * OLC_REORDERFIELDS = "ReorderFields" cdef const char * OLC_ALTERFIELDDEFN = "AlterFieldDefn" cdef const char * OLC_DELETEFEATURE = "DeleteFeature" cdef const char * OLC_STRINGSASUTF8 = "StringsAsUTF8" cdef const char * OLC_TRANSACTIONS = "Transactions" # OGR integer error types. OGRERR_NONE = 0 OGRERR_NOT_ENOUGH_DATA = 1 # not enough data to deserialize */ OGRERR_NOT_ENOUGH_MEMORY = 2 OGRERR_UNSUPPORTED_GEOMETRY_TYPE = 3 OGRERR_UNSUPPORTED_OPERATION = 4 OGRERR_CORRUPT_DATA = 5 OGRERR_FAILURE = 6 OGRERR_UNSUPPORTED_SRS = 7 OGRERR_INVALID_HANDLE = 8 def _explode(coords): """Explode a GeoJSON geometry's coordinates object and yield coordinate tuples. 
As long as the input is conforming, the type of the geometry doesn't matter.""" for e in coords: if isinstance(e, (float, int)): yield coords break else: for f in _explode(e): yield f def _bounds(geometry): """Bounding box of a GeoJSON geometry""" try: xyz = tuple(zip(*list(_explode(geometry['coordinates'])))) return min(xyz[0]), min(xyz[1]), max(xyz[0]), max(xyz[1]) except (KeyError, TypeError): return None def calc_gdal_version_num(maj, min, rev): """Calculates the internal gdal version number based on major, minor and revision""" return int(maj * 1000000 + min * 10000 + rev*100) def get_gdal_version_num(): """Return current internal version number of gdal""" return int(ogrext2.GDALVersionInfo("VERSION_NUM")) def get_gdal_release_name(): """Return release name of gdal""" return ogrext2.GDALVersionInfo("RELEASE_NAME") # Feature extension classes and functions follow. cdef class FeatureBuilder: """Build Fiona features from OGR feature pointers. No OGR objects are allocated by this function and the feature argument is not destroyed. 
""" cdef build(self, void *feature, encoding='utf-8', bbox=False, driver=None): # The only method anyone ever needs to call cdef void *fdefn cdef int i cdef int y = 0 cdef int m = 0 cdef int d = 0 cdef int hh = 0 cdef int mm = 0 cdef int ss = 0 cdef int tz = 0 cdef int retval cdef const char *key_c = NULL props = OrderedDict() for i in range(ogrext2.OGR_F_GetFieldCount(feature)): fdefn = ogrext2.OGR_F_GetFieldDefnRef(feature, i) if fdefn == NULL: raise ValueError("Null feature definition") key_c = ogrext2.OGR_Fld_GetNameRef(fdefn) if key_c == NULL: raise ValueError("Null field name reference") key_b = key_c key = key_b.decode(encoding) fieldtypename = FIELD_TYPES[ogrext2.OGR_Fld_GetType(fdefn)] if not fieldtypename: log.warning( "Skipping field %s: invalid type %s", key, ogrext2.OGR_Fld_GetType(fdefn)) continue # TODO: other types fieldtype = FIELD_TYPES_MAP[fieldtypename] if not ogrext2.OGR_F_IsFieldSet(feature, i): props[key] = None elif fieldtype is int: props[key] = ogrext2.OGR_F_GetFieldAsInteger64(feature, i) elif fieldtype is float: props[key] = ogrext2.OGR_F_GetFieldAsDouble(feature, i) elif fieldtype is text_type: try: val = ogrext2.OGR_F_GetFieldAsString(feature, i) val = val.decode(encoding) except UnicodeDecodeError: log.warning( "Failed to decode %s using %s codec", val, encoding) # Does the text contain a JSON object? Let's check. # Let's check as cheaply as we can. if driver == 'GeoJSON' and val.startswith('{'): try: val = json.loads(val) except ValueError as err: log.warning(str(err)) # Now add to the properties object. 
props[key] = val elif fieldtype in (FionaDateType, FionaTimeType, FionaDateTimeType): retval = ogrext2.OGR_F_GetFieldAsDateTime( feature, i, &y, &m, &d, &hh, &mm, &ss, &tz) if fieldtype is FionaDateType: props[key] = datetime.date(y, m, d).isoformat() elif fieldtype is FionaTimeType: props[key] = datetime.time(hh, mm, ss).isoformat() else: props[key] = datetime.datetime( y, m, d, hh, mm, ss).isoformat() else: log.debug("%s: None, fieldtype: %r, %r" % (key, fieldtype, fieldtype in string_types)) props[key] = None cdef void *cogr_geometry = ogrext2.OGR_F_GetGeometryRef(feature) if cogr_geometry is not NULL: geom = GeomBuilder().build(cogr_geometry) else: geom = None return { 'type': 'Feature', 'id': str(ogrext2.OGR_F_GetFID(feature)), 'geometry': geom, 'properties': props } cdef class OGRFeatureBuilder: """Builds an OGR Feature from a Fiona feature mapping. Allocates one OGR Feature which should be destroyed by the caller. Borrows a layer definition from the collection. """ cdef void * build(self, feature, collection) except NULL: cdef void *cogr_geometry = NULL cdef const char *string_c = NULL cdef WritingSession session session = collection.session cdef void *cogr_layer = session.cogr_layer if cogr_layer == NULL: raise ValueError("Null layer") cdef void *cogr_featuredefn = ogrext2.OGR_L_GetLayerDefn(cogr_layer) if cogr_featuredefn == NULL: raise ValueError("Null feature definition") cdef void *cogr_feature = ogrext2.OGR_F_Create(cogr_featuredefn) if cogr_feature == NULL: raise ValueError("Null feature") if feature['geometry'] is not None: cogr_geometry = OGRGeomBuilder().build( feature['geometry']) ogrext2.OGR_F_SetGeometryDirectly(cogr_feature, cogr_geometry) # OGR_F_SetFieldString takes UTF-8 encoded strings ('bytes' in # Python 3). 
encoding = session.get_internalencoding() for key, value in feature['properties'].items(): log.debug( "Looking up %s in %s", key, repr(session._schema_mapping)) ogr_key = session._schema_mapping[key] schema_type = collection.schema['properties'][key] try: key_bytes = ogr_key.encode(encoding) except UnicodeDecodeError: log.warning("Failed to encode %s using %s codec", key, encoding) key_bytes = ogr_key key_c = key_bytes i = ogrext2.OGR_F_GetFieldIndex(cogr_feature, key_c) if i < 0: continue # Special case: serialize dicts to assist OGR. if isinstance(value, dict): value = json.dumps(value) # Continue over the standard OGR types. if isinstance(value, integer_types): ogrext2.OGR_F_SetFieldInteger64(cogr_feature, i, value) elif isinstance(value, float): ogrext2.OGR_F_SetFieldDouble(cogr_feature, i, value) elif (isinstance(value, string_types) and schema_type in ['date', 'time', 'datetime']): if schema_type == 'date': y, m, d, hh, mm, ss, ff = parse_date(value) elif schema_type == 'time': y, m, d, hh, mm, ss, ff = parse_time(value) else: y, m, d, hh, mm, ss, ff = parse_datetime(value) ogrext2.OGR_F_SetFieldDateTime( cogr_feature, i, y, m, d, hh, mm, ss, 0) elif (isinstance(value, datetime.date) and schema_type == 'date'): y, m, d = value.year, value.month, value.day ogrext2.OGR_F_SetFieldDateTime( cogr_feature, i, y, m, d, 0, 0, 0, 0) elif (isinstance(value, datetime.datetime) and schema_type == 'datetime'): y, m, d = value.year, value.month, value.day hh, mm, ss = value.hour, value.minute, value.second ogrext2.OGR_F_SetFieldDateTime( cogr_feature, i, y, m, d, hh, mm, ss, 0) elif (isinstance(value, datetime.time) and schema_type == 'time'): hh, mm, ss = value.hour, value.minute, value.second ogrext2.OGR_F_SetFieldDateTime( cogr_feature, i, 0, 0, 0, hh, mm, ss, 0) elif isinstance(value, string_types): try: value_bytes = value.encode(encoding) except UnicodeDecodeError: log.warning( "Failed to encode %s using %s codec", value, encoding) value_bytes = value string_c = 
value_bytes ogrext2.OGR_F_SetFieldString(cogr_feature, i, string_c) elif value is None: pass # keep field unset/null else: raise ValueError("Invalid field type %s" % type(value)) log.debug("Set field %s: %s" % (key, value)) return cogr_feature cdef _deleteOgrFeature(void *cogr_feature): """Delete an OGR feature""" if cogr_feature is not NULL: ogrext2.OGR_F_Destroy(cogr_feature) cogr_feature = NULL def featureRT(feature, collection): # For testing purposes only, leaks the JSON data cdef void *cogr_feature = OGRFeatureBuilder().build(feature, collection) cdef void *cogr_geometry = ogrext2.OGR_F_GetGeometryRef(cogr_feature) if cogr_geometry == NULL: raise ValueError("Null geometry") log.debug("Geometry: %s" % ogrext2.OGR_G_ExportToJson(cogr_geometry)) encoding = collection.encoding or 'utf-8' result = FeatureBuilder().build( cogr_feature, bbox=False, encoding=encoding, driver=collection.driver ) _deleteOgrFeature(cogr_feature) return result # Collection-related extension classes and functions cdef class Session: cdef void *cogr_ds cdef void *cogr_layer cdef object _fileencoding cdef object _encoding cdef object collection def __init__(self): self.cogr_ds = NULL self.cogr_layer = NULL self._fileencoding = None self._encoding = None def __dealloc__(self): self.stop() def start(self, collection): cdef const char *path_c = NULL cdef const char *name_c = NULL cdef void *drv = NULL cdef void *ds = NULL cdef char **drvs = NULL if collection.path == '-': path = '/vsistdin/' else: path = collection.path try: path_b = path.encode('utf-8') except UnicodeDecodeError: # Presume already a UTF-8 encoded string path_b = path path_c = path_b # TODO: eliminate this context manager in 2.0 as we have done # in Rasterio 1.0. with cpl_errs: # We have two ways of specifying drivers to try. Resolve the # values into a single set of driver short names. 
if collection._driver: drivers = set([collection._driver]) elif collection.enabled_drivers: drivers = set(collection.enabled_drivers) else: drivers = None # If there are specified drivers, make a GDAL string list # of their names. if drivers: for name in drivers: name_b = name.encode() name_c = name_b log.debug("Trying driver: %s", name) drv = ogrext2.GDALGetDriverByName(name_c) if drv != NULL: drvs = ogrext2.CSLAddString(drvs, name_c) flags = ogrext2.GDAL_OF_VECTOR | ogrext2.GDAL_OF_READONLY try: self.cogr_ds = ogrext2.GDALOpenEx( path_c, flags, drvs, NULL, NULL) finally: ogrext2.CSLDestroy(drvs) if self.cogr_ds == NULL: raise FionaValueError( "No dataset found at path '%s' using drivers: %s" % ( collection.path, drivers or '*')) if isinstance(collection.name, string_types): name_b = collection.name.encode('utf-8') name_c = name_b self.cogr_layer = ogrext2.GDALDatasetGetLayerByName( self.cogr_ds, name_c) elif isinstance(collection.name, int): self.cogr_layer = ogrext2.GDALDatasetGetLayer( self.cogr_ds, collection.name) name_c = ogrext2.OGR_L_GetName(self.cogr_layer) name_b = name_c collection.name = name_b.decode('utf-8') if self.cogr_layer == NULL: raise ValueError("Null layer: " + repr(collection.name)) self.collection = collection userencoding = self.collection.encoding if userencoding: ogrext2.CPLSetThreadLocalConfigOption('SHAPE_ENCODING', '') self._fileencoding = userencoding.upper() else: self._fileencoding = ( ogrext2.OGR_L_TestCapability( self.cogr_layer, OLC_STRINGSASUTF8) and 'utf-8') or ( self.get_driver() == "ESRI Shapefile" and 'ISO-8859-1') or locale.getpreferredencoding().upper() def stop(self): self.cogr_layer = NULL if self.cogr_ds != NULL: ogrext2.GDALClose(self.cogr_ds) self.cogr_ds = NULL def get_fileencoding(self): return self._fileencoding def get_internalencoding(self): if not self._encoding: fileencoding = self.get_fileencoding() self._encoding = ( ogrext2.OGR_L_TestCapability( self.cogr_layer, OLC_STRINGSASUTF8) and 'utf-8') or 
fileencoding return self._encoding def get_length(self): if self.cogr_layer == NULL: raise ValueError("Null layer") return ogrext2.OGR_L_GetFeatureCount(self.cogr_layer, 0) def get_driver(self): cdef void *cogr_driver = ogrext2.GDALGetDatasetDriver(self.cogr_ds) if cogr_driver == NULL: raise ValueError("Null driver") cdef const char *name = ogrext2.OGR_Dr_GetName(cogr_driver) driver_name = name return driver_name.decode() def get_schema(self): cdef int i cdef int n cdef void *cogr_featuredefn cdef void *cogr_fielddefn cdef const char *key_c props = [] if self.cogr_layer == NULL: raise ValueError("Null layer") cogr_featuredefn = ogrext2.OGR_L_GetLayerDefn(self.cogr_layer) if cogr_featuredefn == NULL: raise ValueError("Null feature definition") n = ogrext2.OGR_FD_GetFieldCount(cogr_featuredefn) for i from 0 <= i < n: cogr_fielddefn = ogrext2.OGR_FD_GetFieldDefn(cogr_featuredefn, i) if cogr_fielddefn == NULL: raise ValueError("Null field definition") key_c = ogrext2.OGR_Fld_GetNameRef(cogr_fielddefn) key_b = key_c if not bool(key_b): raise ValueError("Invalid field name ref: %s" % key) key = key_b.decode(self.get_internalencoding()) fieldtypename = FIELD_TYPES[ogrext2.OGR_Fld_GetType(cogr_fielddefn)] if not fieldtypename: log.warning( "Skipping field %s: invalid type %s", key, ogrext2.OGR_Fld_GetType(cogr_fielddefn)) continue val = fieldtypename if fieldtypename == 'float': fmt = "" width = ogrext2.OGR_Fld_GetWidth(cogr_fielddefn) if width: # and width != 24: fmt = ":%d" % width precision = ogrext2.OGR_Fld_GetPrecision(cogr_fielddefn) if precision: # and precision != 15: fmt += ".%d" % precision val = "float" + fmt elif fieldtypename == 'int': fmt = "" width = ogrext2.OGR_Fld_GetWidth(cogr_fielddefn) if width: # and width != 11: fmt = ":%d" % width val = fieldtypename + fmt elif fieldtypename == 'str': fmt = "" width = ogrext2.OGR_Fld_GetWidth(cogr_fielddefn) if width: # and width != 80: fmt = ":%d" % width val = fieldtypename + fmt props.append((key, val)) code = 
normalize_geometry_type_code( ogrext2.OGR_FD_GetGeomType(cogr_featuredefn)) return { 'properties': OrderedDict(props), 'geometry': GEOMETRY_TYPES[code]} def get_crs(self): cdef char *proj_c = NULL cdef const char *auth_key = NULL cdef const char *auth_val = NULL cdef void *cogr_crs = NULL if self.cogr_layer == NULL: raise ValueError("Null layer") cogr_crs = ogrext2.OGR_L_GetSpatialRef(self.cogr_layer) crs = {} if cogr_crs is not NULL: log.debug("Got coordinate system") retval = ogrext2.OSRAutoIdentifyEPSG(cogr_crs) if retval > 0: log.info("Failed to auto identify EPSG: %d", retval) auth_key = ogrext2.OSRGetAuthorityName(cogr_crs, NULL) auth_val = ogrext2.OSRGetAuthorityCode(cogr_crs, NULL) if auth_key != NULL and auth_val != NULL: key_b = auth_key key = key_b.decode('utf-8') if key == 'EPSG': val_b = auth_val val = val_b.decode('utf-8') crs['init'] = "epsg:" + val else: ogrext2.OSRExportToProj4(cogr_crs, &proj_c) if proj_c == NULL: raise ValueError("Null projection") proj_b = proj_c log.debug("Params: %s", proj_b) value = proj_b.decode() value = value.strip() for param in value.split(): kv = param.split("=") if len(kv) == 2: k, v = kv try: v = float(v) if v % 1 == 0: v = int(v) except ValueError: # Leave v as a string pass elif len(kv) == 1: k, v = kv[0], True else: raise ValueError("Unexpected proj parameter %s" % param) k = k.lstrip("+") crs[k] = v ogrext2.CPLFree(proj_c) else: log.debug("Projection not found (cogr_crs was NULL)") return crs def get_crs_wkt(self): cdef char *proj_c = NULL if self.cogr_layer == NULL: raise ValueError("Null layer") cogr_crs = ogrext2.OGR_L_GetSpatialRef(self.cogr_layer) crs_wkt = "" if cogr_crs is not NULL: log.debug("Got coordinate system") ogrext2.OSRExportToWkt(cogr_crs, &proj_c) if proj_c == NULL: raise ValueError("Null projection") proj_b = proj_c crs_wkt = proj_b.decode('utf-8') ogrext2.CPLFree(proj_c) else: log.debug("Projection not found (cogr_crs was NULL)") return crs_wkt def get_extent(self): cdef OGREnvelope extent if 
self.cogr_layer == NULL: raise ValueError("Null layer") result = ogrext2.OGR_L_GetExtent(self.cogr_layer, &extent, 1) return (extent.MinX, extent.MinY, extent.MaxX, extent.MaxY) def has_feature(self, fid): """Provides access to feature data by FID. Supports Collection.__contains__(). """ cdef void * cogr_feature fid = int(fid) cogr_feature = ogrext2.OGR_L_GetFeature(self.cogr_layer, fid) if cogr_feature != NULL: _deleteOgrFeature(cogr_feature) return True else: return False def get_feature(self, fid): """Provides access to feature data by FID. Supports Collection.__contains__(). """ cdef void * cogr_feature fid = int(fid) cogr_feature = ogrext2.OGR_L_GetFeature(self.cogr_layer, fid) if cogr_feature != NULL: _deleteOgrFeature(cogr_feature) return True else: return False def __getitem__(self, item): cdef void * cogr_feature if isinstance(item, slice): itr = Iterator(self.collection, item.start, item.stop, item.step) log.debug("Slice: %r", item) return list(itr) elif isinstance(item, int): index = item # from the back if index < 0: ftcount = ogrext2.OGR_L_GetFeatureCount(self.cogr_layer, 0) if ftcount == -1: raise IndexError( "collection's dataset does not support negative indexes") index += ftcount cogr_feature = ogrext2.OGR_L_GetFeature(self.cogr_layer, index) if cogr_feature == NULL: return None feature = FeatureBuilder().build( cogr_feature, bbox=False, encoding=self.get_internalencoding(), driver=self.collection.driver ) _deleteOgrFeature(cogr_feature) return feature def isactive(self): if self.cogr_layer != NULL and self.cogr_ds != NULL: return 1 else: return 0 cdef class WritingSession(Session): cdef object _schema_mapping def start(self, collection): cdef void *cogr_fielddefn = NULL cdef void *cogr_driver = NULL cdef void *cogr_ds = NULL cdef void *cogr_layer = NULL cdef void *cogr_srs = NULL cdef char **options = NULL self.collection = collection cdef const char *path_c = NULL cdef const char *driver_c = NULL cdef const char *name_c = NULL cdef const char 
*proj_c = NULL cdef const char *fileencoding_c = NULL path = collection.path if collection.mode == 'a': if os.path.exists(path): try: path_b = path.encode('utf-8') except UnicodeDecodeError: path_b = path path_c = path_b self.cogr_ds = ogrext2.GDALOpenEx(path_c, ogrext2.GDAL_OF_VECTOR | ogrext2.GDAL_OF_UPDATE, NULL, NULL, NULL) cogr_driver = ogrext2.GDALGetDatasetDriver(self.cogr_ds) if cogr_driver == NULL: raise ValueError("Null driver") if isinstance(collection.name, string_types): name_b = collection.name.encode() name_c = name_b self.cogr_layer = ogrext2.GDALDatasetGetLayerByName( self.cogr_ds, name_c) elif isinstance(collection.name, int): self.cogr_layer = ogrext2.GDALDatasetGetLayer( self.cogr_ds, collection.name) if self.cogr_layer == NULL: raise RuntimeError( "Failed to get layer %s" % collection.name) else: raise OSError("No such file or directory %s" % path) userencoding = self.collection.encoding self._fileencoding = (userencoding or ( ogrext2.OGR_L_TestCapability(self.cogr_layer, OLC_STRINGSASUTF8) and 'utf-8') or ( self.get_driver() == "ESRI Shapefile" and 'ISO-8859-1') or locale.getpreferredencoding()).upper() elif collection.mode == 'w': try: path_b = path.encode('utf-8') except UnicodeDecodeError: path_b = path path_c = path_b driver_b = collection.driver.encode() driver_c = driver_b cogr_driver = ogrext2.GDALGetDriverByName(driver_c) if cogr_driver == NULL: raise ValueError("Null driver") # Our most common use case is the creation of a new data # file and historically we've assumed that it's a file on # the local filesystem and queryable via os.path. # # TODO: remove the assumption. if not os.path.exists(path): cogr_ds = exc_wrap_pointer(ogrext2.GDALCreate( cogr_driver, path_c, 0, 0, 0, ogrext2.GDT_Unknown, NULL)) # TODO: revisit the logic in the following blocks when we # change the assumption above. 
# TODO: use exc_wrap_pointer() else: cogr_ds = ogrext2.GDALOpenEx(path_c, ogrext2.GDAL_OF_VECTOR | ogrext2.GDAL_OF_UPDATE, NULL, NULL, NULL) # TODO: use exc_wrap_pointer() if cogr_ds == NULL: cogr_ds = ogrext2.GDALCreate( cogr_driver, path_c, 0, 0, 0, ogrext2.GDT_Unknown, NULL) elif collection.name is None: ogrext2.GDALClose(cogr_ds) cogr_ds = NULL log.debug("Deleted pre-existing data at %s", path) cogr_ds = ogrext2.GDALCreate( cogr_driver, path_c, 0, 0, 0, ogrext2.GDT_Unknown, NULL) else: pass if cogr_ds == NULL: raise RuntimeError("Failed to open %s" % path) else: self.cogr_ds = cogr_ds # Set the spatial reference system from the crs given to the # collection constructor. We by-pass the crs_wkt and crs # properties because they aren't accessible until the layer # is constructed (later). col_crs = collection._crs_wkt or collection._crs if col_crs: cogr_srs = ogrext2.OSRNewSpatialReference(NULL) if cogr_srs == NULL: raise ValueError("NULL spatial reference") # First, check for CRS strings like "EPSG:3857". if isinstance(col_crs, string_types): proj_b = col_crs.encode('utf-8') proj_c = proj_b ogrext2.OSRSetFromUserInput(cogr_srs, proj_c) elif isinstance(col_crs, compat.DICT_TYPES): # EPSG is a special case. init = col_crs.get('init') if init: log.debug("Init: %s", init) auth, val = init.split(':') if auth.upper() == 'EPSG': log.debug("Setting EPSG: %s", val) ogrext2.OSRImportFromEPSG(cogr_srs, int(val)) else: params = [] col_crs['wktext'] = True for k, v in col_crs.items(): if v is True or (k in ('no_defs', 'wktext') and v): params.append("+%s" % k) else: params.append("+%s=%s" % (k, v)) proj = " ".join(params) log.debug("PROJ.4 to be imported: %r", proj) proj_b = proj.encode('utf-8') proj_c = proj_b ogrext2.OSRImportFromProj4(cogr_srs, proj_c) else: raise ValueError("Invalid CRS") # Fixup, export to WKT, and set the GDAL dataset's projection. ogrext2.OSRFixup(cogr_srs) # Figure out what encoding to use. 
The encoding parameter given # to the collection constructor takes highest precedence, then # 'iso-8859-1', then the system's default encoding as last resort. sysencoding = locale.getpreferredencoding() userencoding = collection.encoding self._fileencoding = (userencoding or ( collection.driver == "ESRI Shapefile" and 'ISO-8859-1') or sysencoding).upper() # The ENCODING option makes no sense for some drivers and # will result in a warning. Fixing is a TODO. fileencoding = self.get_fileencoding() if fileencoding: fileencoding_b = fileencoding.encode() fileencoding_c = fileencoding_b with cpl_errs: options = ogrext2.CSLSetNameValue(options, "ENCODING", fileencoding_c) # Does the layer exist already? If so, we delete it. layer_count = ogrext2.GDALDatasetGetLayerCount(self.cogr_ds) layer_names = [] for i in range(layer_count): cogr_layer = ogrext2.GDALDatasetGetLayer(cogr_ds, i) name_c = ogrext2.OGR_L_GetName(cogr_layer) name_b = name_c layer_names.append(name_b.decode('utf-8')) idx = -1 if isinstance(collection.name, string_types): if collection.name in layer_names: idx = layer_names.index(collection.name) elif isinstance(collection.name, int): if collection.name >= 0 and collection.name < layer_count: idx = collection.name if idx >= 0: log.debug("Deleted pre-existing layer at %s", collection.name) ogrext2.GDALDatasetDeleteLayer(self.cogr_ds, idx) # Create the named layer in the datasource. name_b = collection.name.encode('utf-8') name_c = name_b try: self.cogr_layer = exc_wrap_pointer( ogrext2.GDALDatasetCreateLayer( self.cogr_ds, name_c, cogr_srs, geometry_type_code( collection.schema.get('geometry', 'Unknown')), options)) except Exception as exc: raise DriverIOError(str(exc)) finally: if options != NULL: ogrext2.CSLDestroy(options) # Shapefile layers make a copy of the passed srs. GPKG # layers, on the other hand, increment its reference # count. OSRRelease() is the safe way to release # OGRSpatialReferenceH. 
if cogr_srs != NULL: ogrext2.OSRRelease(cogr_srs) if self.cogr_layer == NULL: raise ValueError("Null layer") log.debug("Created layer %s", collection.name) # Next, make a layer definition from the given schema properties, # which are an ordered dict since Fiona 1.0.1. for key, value in collection.schema['properties'].items(): log.debug("Creating field: %s %s", key, value) # Convert 'long' to 'int'. See # https://github.com/Toblerity/Fiona/issues/101. if value == 'long': value = 'int' # Is there a field width/precision? width = precision = None if ':' in value: value, fmt = value.split(':') if '.' in fmt: width, precision = map(int, fmt.split('.')) else: width = int(fmt) field_type = FIELD_TYPES.index(value) # See https://trac.osgeo.org/gdal/wiki/rfc31_ogr_64 if value == 'int' and (width is not None and width >= 10): field_type = 12 encoding = self.get_internalencoding() key_bytes = key.encode(encoding) cogr_fielddefn = ogrext2.OGR_Fld_Create( key_bytes, field_type) if cogr_fielddefn == NULL: raise ValueError("Null field definition") if width: ogrext2.OGR_Fld_SetWidth(cogr_fielddefn, width) if precision: ogrext2.OGR_Fld_SetPrecision(cogr_fielddefn, precision) ogrext2.OGR_L_CreateField(self.cogr_layer, cogr_fielddefn, 1) ogrext2.OGR_Fld_Destroy(cogr_fielddefn) log.debug("Created fields") # Mapping of the Python collection schema to the munged # OGR schema. 
ogr_schema = self.get_schema() self._schema_mapping = dict(zip( collection.schema['properties'].keys(), ogr_schema['properties'].keys() )) log.debug("Writing started") def writerecs(self, records, collection): """Writes buffered records to OGR.""" cdef void *cogr_driver cdef void *cogr_feature cdef void *cogr_layer = self.cogr_layer if cogr_layer == NULL: raise ValueError("Null layer") schema_geom_type = collection.schema['geometry'] cogr_driver = ogrext2.GDALGetDatasetDriver(self.cogr_ds) if ogrext2.OGR_Dr_GetName(cogr_driver) == b"GeoJSON": def validate_geometry_type(rec): return True elif ogrext2.OGR_Dr_GetName(cogr_driver) == b"ESRI Shapefile" \ and "Point" not in collection.schema['geometry']: schema_geom_type = collection.schema['geometry'].lstrip( "3D ").lstrip("Multi") def validate_geometry_type(rec): return rec['geometry'] is None or \ rec['geometry']['type'].lstrip( "3D ").lstrip("Multi") == schema_geom_type else: schema_geom_type = collection.schema['geometry'].lstrip("3D ") def validate_geometry_type(rec): return rec['geometry'] is None or \ rec['geometry']['type'].lstrip("3D ") == schema_geom_type schema_props_keys = set(collection.schema['properties'].keys()) for record in records: log.debug("Creating feature in layer: %s" % record) # Validate against collection's schema. 
if set(record['properties'].keys()) != schema_props_keys: raise ValueError( "Record does not match collection schema: %r != %r" % ( record['properties'].keys(), list(schema_props_keys) )) if not validate_geometry_type(record): raise ValueError( "Record's geometry type does not match " "collection schema's geometry type: %r != %r" % ( record['geometry']['type'], collection.schema['geometry'] )) cogr_feature = OGRFeatureBuilder().build(record, collection) result = ogrext2.OGR_L_CreateFeature(cogr_layer, cogr_feature) if result != OGRERR_NONE: raise RuntimeError("Failed to write record: %s" % record) _deleteOgrFeature(cogr_feature) def sync(self, collection): """Syncs OGR to disk.""" cdef void *cogr_ds = self.cogr_ds cdef void *cogr_layer = self.cogr_layer if cogr_ds == NULL: raise ValueError("Null data source") with cpl_errs: ogrext2.GDALFlushCache(cogr_ds) log.debug("Flushed data source cache") cdef class Iterator: """Provides iterated access to feature data. """ # Reference to its Collection cdef collection cdef encoding cdef int next_index cdef stop cdef start cdef step cdef fastindex cdef stepsign def __cinit__(self, collection, start=None, stop=None, step=None, bbox=None, mask=None): if collection.session is None: raise ValueError("I/O operation on closed collection") self.collection = collection cdef Session session cdef void *cogr_geometry session = self.collection.session cdef void *cogr_layer = session.cogr_layer if cogr_layer == NULL: raise ValueError("Null layer") ogrext2.OGR_L_ResetReading(cogr_layer) if bbox and mask: raise ValueError("mask and bbox can not be set together") if bbox: ogrext2.OGR_L_SetSpatialFilterRect( cogr_layer, bbox[0], bbox[1], bbox[2], bbox[3]) elif mask: cogr_geometry = OGRGeomBuilder().build(mask) ogrext2.OGR_L_SetSpatialFilter(cogr_layer, cogr_geometry) ogrext2.OGR_G_DestroyGeometry(cogr_geometry) else: ogrext2.OGR_L_SetSpatialFilter( cogr_layer, NULL) self.encoding = session.get_internalencoding() self.fastindex = 
ogrext2.OGR_L_TestCapability( session.cogr_layer, OLC_FASTSETNEXTBYINDEX) ftcount = ogrext2.OGR_L_GetFeatureCount(session.cogr_layer, 0) if ftcount == -1 and ((start is not None and start < 0) or (stop is not None and stop < 0)): raise IndexError( "collection's dataset does not support negative slice indexes") if stop is not None and stop < 0: stop += ftcount if start is None: start = 0 if start is not None and start < 0: start += ftcount # step size if step is None: step = 1 if step == 0: raise ValueError("slice step cannot be zero") if step < 0 and not self.fastindex: warnings.warn("Layer does not support" \ "OLCFastSetNextByIndex, negative step size may" \ " be slow", RuntimeWarning) self.stepsign = int(math.copysign(1, step)) self.stop = stop self.start = start self.step = step self.next_index = start log.debug("Index: %d", self.next_index) ogrext2.OGR_L_SetNextByIndex(session.cogr_layer, self.next_index) def __iter__(self): return self def _next(self): """Internal method to set read cursor to next item""" cdef Session session session = self.collection.session # Check if next_index is valid if self.next_index < 0: raise StopIteration if self.stepsign == 1: if self.next_index < self.start or (self.stop is not None and self.next_index >= self.stop): raise StopIteration else: if self.next_index > self.start or (self.stop is not None and self.next_index <= self.stop): raise StopIteration # Set read cursor to next_item position if self.step > 1 and self.fastindex: ogrext2.OGR_L_SetNextByIndex(session.cogr_layer, self.next_index) elif self.step > 1 and not self.fastindex and not self.next_index == self.start: for _ in range(self.step - 1): # TODO rbuffat add test -> OGR_L_GetNextFeature increments cursor by 1, therefore self.step - 1 as one increment was performed when feature is read cogr_feature = ogrext2.OGR_L_GetNextFeature(session.cogr_layer) if cogr_feature == NULL: raise StopIteration elif self.step > 1 and not self.fastindex and self.next_index == self.start: 
ogrext2.OGR_L_SetNextByIndex(session.cogr_layer, self.next_index) elif self.step == 0: # ogrext2.OGR_L_GetNextFeature increments read cursor by one pass elif self.step < 0: ogrext2.OGR_L_SetNextByIndex(session.cogr_layer, self.next_index) # set the next index self.next_index += self.step def __next__(self): cdef void * cogr_feature cdef Session session session = self.collection.session #Update read cursor self._next() # Get the next feature. cogr_feature = ogrext2.OGR_L_GetNextFeature(session.cogr_layer) if cogr_feature == NULL: raise StopIteration feature = FeatureBuilder().build( cogr_feature, bbox=False, encoding=self.encoding, driver=self.collection.driver ) _deleteOgrFeature(cogr_feature) return feature cdef class ItemsIterator(Iterator): def __next__(self): cdef long fid cdef void * cogr_feature cdef Session session session = self.collection.session #Update read cursor self._next() # Get the next feature. cogr_feature = ogrext2.OGR_L_GetNextFeature(session.cogr_layer) if cogr_feature == NULL: raise StopIteration fid = ogrext2.OGR_F_GetFID(cogr_feature) feature = FeatureBuilder().build( cogr_feature, bbox=False, encoding=self.encoding, driver=self.collection.driver ) _deleteOgrFeature(cogr_feature) return fid, feature cdef class KeysIterator(Iterator): def __next__(self): cdef long fid cdef void * cogr_feature cdef Session session session = self.collection.session #Update read cursor self._next() # Get the next feature. 
cogr_feature = ogrext2.OGR_L_GetNextFeature(session.cogr_layer) if cogr_feature == NULL: raise StopIteration fid = ogrext2.OGR_F_GetFID(cogr_feature) _deleteOgrFeature(cogr_feature) return fid def _remove(path, driver=None): """Deletes an OGR data source """ cdef void *cogr_driver cdef int result if driver is None: driver = 'ESRI Shapefile' cogr_driver = ogrext2.OGRGetDriverByName(driver.encode('utf-8')) if cogr_driver == NULL: raise ValueError("Null driver") if not ogrext2.OGR_Dr_TestCapability(cogr_driver, ODrCDeleteDataSource): raise RuntimeError("Driver does not support dataset removal operation") result = ogrext2.GDALDeleteDataset(cogr_driver, path.encode('utf-8')) if result != OGRERR_NONE: raise RuntimeError("Failed to remove data source {}".format(path)) def _listlayers(path): """Provides a list of the layers in an OGR data source. """ cdef void *cogr_ds cdef void *cogr_layer cdef const char *path_c cdef const char *name_c # Open OGR data source. try: path_b = path.encode('utf-8') except UnicodeDecodeError: path_b = path path_c = path_b with cpl_errs: cogr_ds = ogrext2.GDALOpenEx(path_c, ogrext2.GDAL_OF_VECTOR | ogrext2.GDAL_OF_READONLY, NULL, NULL, NULL) # cogr_ds = ogrext2.OGROpen(path_c, 0, NULL) if cogr_ds == NULL: raise ValueError("No data available at path '%s'" % path) # Loop over the layers to get their names. layer_count = ogrext2.GDALDatasetGetLayerCount(cogr_ds) layer_names = [] for i in range(layer_count): cogr_layer = ogrext2.GDALDatasetGetLayer(cogr_ds, i) name_c = ogrext2.OGR_L_GetName(cogr_layer) name_b = name_c layer_names.append(name_b.decode('utf-8')) # Close up data source. if cogr_ds != NULL: ogrext2.GDALClose(cogr_ds) cogr_ds = NULL return layer_names def buffer_to_virtual_file(bytesbuf, ext=''): """Maps a bytes buffer to a virtual file. `ext` is empty or begins with a period and contains at most one period. 
""" vsi_filename = os.path.join('/vsimem', uuid.uuid4().hex + ext) vsi_cfilename = vsi_filename if not isinstance(vsi_filename, string_types) else vsi_filename.encode('utf-8') vsi_handle = ogrext2.VSIFileFromMemBuffer(vsi_cfilename, bytesbuf, len(bytesbuf), 0) if vsi_handle == NULL: raise OSError('failed to map buffer to file') if ogrext2.VSIFCloseL(vsi_handle) != 0: raise OSError('failed to close mapped file handle') return vsi_filename def remove_virtual_file(vsi_filename): vsi_cfilename = vsi_filename if not isinstance(vsi_filename, string_types) else vsi_filename.encode('utf-8') return ogrext2.VSIUnlink(vsi_cfilename) Fiona-1.7.10/fiona/rfc3339.py000066400000000000000000000046441317446052300154130ustar00rootroot00000000000000# Fiona's date and time is founded on RFC 3339. # # OGR knows 3 time "zones": GMT, "local time", amd "unknown". Fiona, when # writing will convert times with a timezone offset to GMT (Z) and otherwise # will write times with the unknown zone. import datetime import logging import re log = logging.getLogger("Fiona") # Fiona's 'date', 'time', and 'datetime' types are sub types of 'str'. class FionaDateType(str): """Dates without time.""" class FionaTimeType(str): """Times without dates.""" class FionaDateTimeType(str): """Dates and times.""" pattern_date = re.compile(r"(\d\d\d\d)(-)?(\d\d)(-)?(\d\d)") pattern_time = re.compile( r"(\d\d)(:)?(\d\d)(:)?(\d\d)?(\.\d+)?(Z|([+-])?(\d\d)?(:)?(\d\d))?" ) pattern_datetime = re.compile( r"(\d\d\d\d)(-)?(\d\d)(-)?(\d\d)(T)?(\d\d)(:)?(\d\d)(:)?(\d\d)?(\.\d+)?(Z|([+-])?(\d\d)?(:)?(\d\d))?" 
# Compiled patterns for the RFC 3339 date, time, and datetime
# productions recognized below.
pattern_date = re.compile(r"(\d\d\d\d)(-)?(\d\d)(-)?(\d\d)")
pattern_time = re.compile(
    r"(\d\d)(:)?(\d\d)(:)?(\d\d)?(\.\d+)?(Z|([+-])?(\d\d)?(:)?(\d\d))?")
pattern_datetime = re.compile(
    r"(\d\d\d\d)(-)?(\d\d)(-)?(\d\d)(T)?(\d\d)(:)?(\d\d)(:)?(\d\d)?(\.\d+)?(Z|([+-])?(\d\d)?(:)?(\d\d))?")

log = logging.getLogger("Fiona")


class group_accessor(object):
    """Wraps a regex match, reporting any absent group as 0."""

    def __init__(self, m):
        self.match = m

    def group(self, i):
        # A group that did not participate in the match (None) or does
        # not exist at all (IndexError) both come back as 0 so callers
        # can feed the value straight to int()/float().
        try:
            value = self.match.group(i)
        except IndexError:
            return 0
        return value or 0


def parse_time(text):
    """Given a RFC 3339 time, returns a tz-naive datetime tuple"""
    m = re.search(pattern_time, text)
    if m is None:
        raise ValueError("Time data '%s' does not match pattern" % text)
    log.debug("Match groups: %s", m.groups())
    g = group_accessor(m)
    # Tuple layout: (year, month, day, hour, minute, second,
    # microseconds). Any timezone offset in the input is ignored;
    # the result is tz-naive by design.
    return (
        0, 0, 0,
        int(g.group(1)),
        int(g.group(3)),
        int(g.group(5)),
        1000000.0 * float(g.group(6)))


def parse_date(text):
    """Given a RFC 3339 date, returns a tz-naive datetime tuple"""
    m = re.search(pattern_date, text)
    if m is None:
        raise ValueError("Time data '%s' does not match pattern" % text)
    log.debug("Match groups: %s", m.groups())
    g = group_accessor(m)
    return (
        int(g.group(1)),
        int(g.group(3)),
        int(g.group(5)),
        0, 0, 0, 0.0)


def parse_datetime(text):
    """Given a RFC 3339 datetime, returns a tz-naive datetime tuple"""
    m = re.search(pattern_datetime, text)
    if m is None:
        raise ValueError("Time data '%s' does not match pattern" % text)
    log.debug("Match groups: %s", m.groups())
    g = group_accessor(m)
    return (
        int(g.group(1)),
        int(g.group(3)),
        int(g.group(5)),
        int(g.group(7)),
        int(g.group(9)),
        int(g.group(11)),
        1000000.0 * float(g.group(12)))
def crs_uri(crs):
    """Returns a CRS URN computed from a crs dict, or None.

    Parameters
    ----------
    crs: dict
        A mapping of PROJ.4 parameters such as ``{'proj': 'longlat',
        'datum': 'WGS84'}``.  It may also carry an ``'init'`` entry
        like ``'epsg:4326'``.

    Returns
    -------
    str or None
        An OGC URN identifying the CRS, or None when one cannot be
        determined from the dict.
    """
    # References version 6.3 of the EPSG database.
    # TODO: get proper version from GDAL/OGR API?
    # Use .get() throughout: 'proj', 'datum', and 'ellps' are all
    # optional in a crs dict, and a missing key previously raised
    # KeyError here.
    if crs.get('proj') == 'longlat' and (
            crs.get('datum') == 'WGS84' or crs.get('ellps') == 'WGS84'):
        return 'urn:ogc:def:crs:OGC:1.3:CRS84'
    elif 'epsg:' in crs.get('init', ''):
        epsg, code = crs['init'].split(':')
        return 'urn:ogc:def:crs:EPSG::%s' % code
    else:
        return None


def id_record(rec):
    """Converts a record's id to a blank node id and returns the record."""
    # Mutates and returns the same dict so it can be used with map().
    rec['id'] = '_:f%s' % rec['id']
    return rec
indented = bool(args.indent) rec_indent = "\n" + " " * (2 * (args.indent or 0)) collection = { 'type': 'FeatureCollection', 'fiona:schema': meta['schema'], 'fiona:crs': meta['crs'], '_crs': crs_uri(meta['crs']), 'features': [] } if args.use_ld_context: collection['@context'] = make_ld_context( args.ld_context_items) head, tail = json.dumps(collection, **dump_kw).split('[]') sink.write(head) sink.write("[") itr = iter(source) # Try the first record. try: i, first = 0, next(itr) if args.use_ld_context: first = id_record(first) if indented: sink.write(rec_indent) sink.write( json.dumps(first, **dump_kw ).replace("\n", rec_indent)) except StopIteration: pass except Exception as exc: # Ignoring errors is *not* the default. if ignore_errors: logger.error( "failed to serialize file record %d (%s), " "continuing", i, exc) else: # Log error and close up the GeoJSON, leaving it # more or less valid no matter what happens above. logger.critical( "failed to serialize file record %d (%s), " "quiting", i, exc) sink.write("]") sink.write(tail) if indented: sink.write("\n") return 1 # Because trailing commas aren't valid in JSON arrays # we'll write the item separator before each of the # remaining features. for i, rec in enumerate(itr, 1): try: if args.use_ld_context: rec = id_record(rec) if indented: sink.write(rec_indent) sink.write(item_sep) sink.write( json.dumps(rec, **dump_kw ).replace("\n", rec_indent)) except Exception as exc: if ignore_errors: logger.error( "failed to serialize file record %d (%s), " "continuing", i, exc) else: logger.critical( "failed to serialize file record %d (%s), " "quiting", i, exc) sink.write("]") sink.write(tail) if indented: sink.write("\n") return 1 # Close up the GeoJSON after writing all features. sink.write("]") sink.write(tail) if indented: sink.write("\n") else: # Buffer GeoJSON data at the collection level. The default. 
collection = { 'type': 'FeatureCollection', 'fiona:schema': meta['schema'], 'fiona:crs': meta['crs'], '_crs': crs_uri(meta['crs']) } if args.use_ld_context: collection['@context'] = make_ld_context( args.ld_context_items) collection['features'] = list(map(id_record, source)) else: collection['features'] = list(source) json.dump(collection, sink, **dump_kw) return 0 if __name__ == '__main__': import argparse logging.basicConfig(stream=sys.stderr, level=logging.INFO) logger = logging.getLogger('fiona.tool') parser = argparse.ArgumentParser( description="Serialize a file's records or description to GeoJSON") parser.add_argument('infile', help="input file name") parser.add_argument('outfile', nargs='?', help="output file name, defaults to stdout if omitted", default=sys.stdout) parser.add_argument('-d', '--description', action='store_true', help="serialize file's data description (schema) only") parser.add_argument('-n', '--indent', type=int, default=None, metavar='N', help="indentation level in N number of chars") parser.add_argument('--compact', action='store_true', help="use compact separators (',', ':')") parser.add_argument('--encoding', default=None, metavar='ENC', help="Specify encoding of the input file") parser.add_argument('--record-buffered', dest='record_buffered', action='store_true', help="Economical buffering of writes at record, not collection (default), level") parser.add_argument('--ignore-errors', dest='ignore_errors', action='store_true', help="log errors but do not stop serialization") parser.add_argument('--use-ld-context', dest='use_ld_context', action='store_true', help="add a JSON-LD context to JSON output") parser.add_argument('--add-ld-context-item', dest='ld_context_items', action='append', metavar='TERM=URI', help="map a term to a URI and add it to the output's JSON LD context") args = parser.parse_args() # Keyword args to be used in all following json.dump* calls. 
def transform(src_crs, dst_crs, xs, ys):
    """Transform coordinates from one reference system to another.

    Parameters
    ----------
    src_crs: str or dict
        A string like 'EPSG:4326' or a dict of proj4 parameters like
        {'proj': 'lcc', 'lat_0': 18.0, 'lat_1': 18.0, 'lon_0': -77.0}
        representing the coordinate reference system on the "source"
        or "from" side of the transformation.
    dst_crs: str or dict
        A string or dict representing the coordinate reference system
        on the "destination" or "to" side of the transformation.
    xs: sequence of float
        A list or tuple of x coordinate values. Must have the same
        length as the ``ys`` parameter.
    ys: sequence of float
        A list or tuple of y coordinate values. Must have the same
        length as the ``xs`` parameter.

    Returns
    -------
    xp, yp: list of float
        A pair of transformed coordinate sequences. The elements of
        ``xp`` and ``yp`` correspond exactly to the elements of the
        ``xs`` and ``ys`` input parameters.

    Examples
    --------

    >>> transform('EPSG:4326', 'EPSG:26953', [-105.0], [40.0])
    ([957097.0952383667], [378940.8419189212])

    """
    # Delegate to the _transform C extension module, which wraps
    # OGR's coordinate transformation machinery.
    transformed = _transform(src_crs, dst_crs, xs, ys)
    return transformed
def main():
    """Install build-system requirements from pyproject.toml, then
    install this package in editable mode with its test extras.

    Returns
    -------
    None
        Side effects only: invokes ``pip`` via subprocess.
    """
    import ast  # stdlib; local import keeps the script's top matter unchanged

    # Parse config file for build system requirements. Initialize to
    # None so a pyproject.toml without a 'requires' line is handled
    # (previously this path raised NameError below).
    build_system_requirements = None
    with open('pyproject.toml') as config:
        for line in config:
            if line.startswith('requires'):
                build_system_requirements = line.split('=', 1)[-1]

    # Install them if found. ast.literal_eval replaces eval(): the
    # value is a plain list literal and must not be executed as code.
    if build_system_requirements:
        reqs = ast.literal_eval(build_system_requirements.strip())
        subprocess.call(['pip', 'install'] + reqs)

    # Now install our package in editable mode. The build requirements
    # are deliberately NOT appended here: the original appended ``reqs``,
    # which re-installed the build tools as if they were targets and
    # crashed with NameError when no requirements were found.
    subprocess.call(['pip', 'install', '-e', '.[test]'])
action='store_true', help="use compact separators (',', ':')") parser.add_argument('--encoding', default=None, metavar='ENC', help="Specify encoding of the input file") parser.add_argument('--record-buffered', dest='record_buffered', action='store_true', help="Economical buffering of writes at record, not collection (default), level") parser.add_argument('--ignore-errors', dest='ignore_errors', action='store_true', help="log errors but do not stop serialization") parser.add_argument('--use-ld-context', dest='use_ld_context', action='store_true', help="add a JSON-LD context to JSON output") parser.add_argument('--add-ld-context-item', dest='ld_context_items', action='append', metavar='TERM=URI', help="map a term to a URI and add it to the output's JSON LD context") args = parser.parse_args() # Keyword args to be used in all following json.dump* calls. dump_kw = {'sort_keys': True} if args.indent: dump_kw['indent'] = args.indent if args.compact: dump_kw['separators'] = (',', ':') item_sep = args.compact and ',' or ', ' ignore_errors = args.ignore_errors sys.exit(main(args, dump_kw, item_sep, ignore_errors)) Fiona-1.7.10/scripts/fiona.insp000066400000000000000000000006661317446052300163270ustar00rootroot00000000000000#!/usr/bin/env python import sys from fiona.inspector import main if __name__ == '__main__': import argparse parser = argparse.ArgumentParser( prog="fiona.insp", description="Open a data file and drop into an interactive interpreter") parser.add_argument( 'src', metavar='FILE', help="Input dataset file name") args = parser.parse_args() sys.exit(main(args.src)) Fiona-1.7.10/scripts/travis_gdal_install.sh000077500000000000000000000041421317446052300207150ustar00rootroot00000000000000#!/bin/sh set -e GDALOPTS=" --with-ogr \ --with-geos \ --with-expat \ --without-libtool \ --with-libz=internal \ --with-libtiff=internal \ --with-geotiff=internal \ --without-gif \ --without-pg \ --without-grass \ --without-libgrass \ --without-cfitsio \ --without-pcraster \ 
--without-netcdf \ --with-png=internal \ --with-jpeg=internal \ --without-gif \ --without-ogdi \ --without-fme \ --without-hdf4 \ --without-hdf5 \ --without-jasper \ --without-ecw \ --without-kakadu \ --without-mrsid \ --without-jp2mrsid \ --without-bsb \ --without-grib \ --without-mysql \ --without-ingres \ --without-xerces \ --without-odbc \ --without-curl \ --without-sqlite3 \ --without-dwgdirect \ --without-idb \ --without-sde \ --without-perl \ --without-php \ --without-ruby \ --without-python" # Create build dir if not exists if [ ! -d "$GDALBUILD" ]; then mkdir $GDALBUILD; fi if [ ! -d "$GDALINST" ]; then mkdir $GDALINST; fi ls -l $GDALINST if [ "$GDALVERSION" = "1.9.2" -a ! -d "$GDALINST/gdal-$GDALVERSION" ]; then cd $GDALBUILD wget http://download.osgeo.org/gdal/gdal-$GDALVERSION.tar.gz tar -xzf gdal-$GDALVERSION.tar.gz cd gdal-$GDALVERSION ./configure --prefix=$GDALINST/gdal-$GDALVERSION $GDALOPTS make -s -j 2 make install fi # download and compile gdal version if [ "$GDALVERSION" != "1.9.2" -a ! -d "$GDALINST/gdal-$GDALVERSION" ]; then cd $GDALBUILD wget http://download.osgeo.org/gdal/$GDALVERSION/gdal-$GDALVERSION.tar.gz tar -xzf gdal-$GDALVERSION.tar.gz cd gdal-$GDALVERSION ./configure --prefix=$GDALINST/gdal-$GDALVERSION $GDALOPTS make -s -j 2 make install fi # change back to travis build dir cd $TRAVIS_BUILD_DIR Fiona-1.7.10/setup.cfg000066400000000000000000000000601317446052300144560ustar00rootroot00000000000000[nosetests] tests=tests nocapture=1 verbosity=3 Fiona-1.7.10/setup.py000066400000000000000000000237111317446052300143570ustar00rootroot00000000000000from distutils.command.sdist import sdist from distutils import log import logging import os import shutil import subprocess import sys from setuptools import setup from setuptools.extension import Extension # Use Cython if available. 
try: from Cython.Build import cythonize except ImportError: cythonize = None def check_output(cmd): # since subprocess.check_output doesn't exist in 2.6 # we wrap it here. try: out = subprocess.check_output(cmd) return out.decode('utf') except AttributeError: # For some reasone check_output doesn't exist # So fall back on Popen p = subprocess.Popen(cmd, stdout=subprocess.PIPE) out, err = p.communicate() return out def copy_data_tree(datadir, destdir): try: shutil.rmtree(destdir) except OSError: pass shutil.copytree(datadir, destdir) # Parse the version from the fiona module. with open('fiona/__init__.py', 'r') as f: for line in f: if line.find("__version__") >= 0: version = line.split("=")[1].strip() version = version.strip('"') version = version.strip("'") break # Fiona's auxiliary files are UTF-8 encoded and we'll specify this when # reading with Python 3+ open_kwds = {} if sys.version_info > (3,): open_kwds['encoding'] = 'utf-8' with open('VERSION.txt', 'w', **open_kwds) as f: f.write(version) with open('README.rst', **open_kwds) as f: readme = f.read() with open('CREDITS.txt', **open_kwds) as f: credits = f.read() with open('CHANGES.txt', **open_kwds) as f: changes = f.read() # Set a flag for builds where the source directory is a repo checkout. source_is_repo = os.path.exists("MANIFEST.in") # Extend distutil's sdist command to generate C extension sources from # both `ogrext`.pyx` and `ogrext2.pyx` for GDAL 1.x and 2.x. class sdist_multi_gdal(sdist): def run(self): shutil.copy('fiona/ogrext1.pyx', 'fiona/ogrext.pyx') _ = check_output(['cython', '-v', '-f', 'fiona/ogrext.pyx', '-o', 'fiona/ogrext1.c']) print(_) shutil.copy('fiona/ogrext2.pyx', 'fiona/ogrext.pyx') _ = check_output(['cython', '-v', '-f', 'fiona/ogrext.pyx', '-o', 'fiona/ogrext2.c']) print(_) sdist.run(self) # Building Fiona requires options that can be obtained from GDAL's gdal-config # program or can be specified using setup arguments. The latter override the # former. 
# # A GDAL API version is strictly required. Without this the setup script # cannot know whether to use the GDAL version 1 or 2 source files. The GDAL # API version can be specified in 2 ways. # # 1. By the gdal-config program, optionally pointed to by GDAL_CONFIG # 2. By a GDAL_VERSION environment variable. This overrides number 1. include_dirs = [] library_dirs = [] libraries = [] extra_link_args = [] gdal_output = [None for i in range(4)] gdalversion = '2' if 'clean' not in sys.argv: try: gdal_config = os.environ.get('GDAL_CONFIG', 'gdal-config') for i, flag in enumerate( ["--cflags", "--libs", "--datadir", "--version"]): gdal_output[i] = check_output([gdal_config, flag]).strip() for item in gdal_output[0].split(): if item.startswith("-I"): include_dirs.extend(item[2:].split(":")) for item in gdal_output[1].split(): if item.startswith("-L"): library_dirs.extend(item[2:].split(":")) elif item.startswith("-l"): libraries.append(item[2:]) else: # e.g. -framework GDAL extra_link_args.append(item) gdalversion = gdal_output[3] if gdalversion: log.info("GDAL API version obtained from gdal-config: %s", gdalversion) except Exception as e: if os.name == "nt": log.info("Building on Windows requires extra options to setup.py " "to locate needed GDAL files.\nMore information is " "available in the README.") else: log.warn("Failed to get options via gdal-config: %s", str(e)) # Get GDAL API version from environment variable. if 'GDAL_VERSION' in os.environ: gdalversion = os.environ['GDAL_VERSION'] log.info("GDAL API version obtained from environment: %s", gdalversion) # Get GDAL API version from the command line if specified there. if '--gdalversion' in sys.argv: index = sys.argv.index('--gdalversion') sys.argv.pop(index) gdalversion = sys.argv.pop(index) log.info("GDAL API version obtained from command line option: %s", gdalversion) if not gdalversion: log.fatal("A GDAL API version must be specified. 
Provide a path " "to gdal-config using a GDAL_CONFIG environment variable " "or use a GDAL_VERSION environment variable.") sys.exit(1) if os.environ.get('PACKAGE_DATA'): destdir = 'fiona/gdal_data' if gdal_output[2]: log.info("Copying gdal data from %s" % gdal_output[2]) copy_data_tree(gdal_output[2], destdir) else: # check to see if GDAL_DATA is defined gdal_data = os.environ.get('GDAL_DATA', None) if gdal_data: log.info("Copying gdal data from %s" % gdal_data) copy_data_tree(gdal_data, destdir) # Conditionally copy PROJ.4 data. projdatadir = os.environ.get('PROJ_LIB', '/usr/local/share/proj') if os.path.exists(projdatadir): log.info("Copying proj data from %s" % projdatadir) copy_data_tree(projdatadir, 'fiona/proj_data') ext_options = dict( include_dirs=include_dirs, library_dirs=library_dirs, libraries=libraries, extra_link_args=extra_link_args) # Define the extension modules. ext_modules = [] if source_is_repo and "clean" not in sys.argv: # When building from a repo, Cython is required. log.info("MANIFEST.in found, presume a repo, cythonizing...") if not cythonize: log.fatal("Cython.Build.cythonize not found. " "Cython is required to build from a repo.") sys.exit(1) if gdalversion.startswith("1"): log.info("Building Fiona for gdal 1.x: {0}".format(gdalversion)) shutil.copy('fiona/ogrext1.pyx', 'fiona/ogrext.pyx') else: log.info("Building Fiona for gdal 2.x: {0}".format(gdalversion)) shutil.copy('fiona/ogrext2.pyx', 'fiona/ogrext.pyx') ext_modules = cythonize([ Extension('fiona._geometry', ['fiona/_geometry.pyx'], **ext_options), Extension('fiona._transform', ['fiona/_transform.pyx'], **ext_options), Extension('fiona._crs', ['fiona/_crs.pyx'], **ext_options), Extension('fiona._drivers', ['fiona/_drivers.pyx'], **ext_options), Extension('fiona._err', ['fiona/_err.pyx'], **ext_options), Extension('fiona.ogrext', ['fiona/ogrext.pyx'], **ext_options)]) # If there's no manifest template, as in an sdist, we just specify .c files. 
elif "clean" not in sys.argv: ext_modules = [ Extension('fiona._transform', ['fiona/_transform.cpp'], **ext_options), Extension('fiona._geometry', ['fiona/_geometry.c'], **ext_options), Extension('fiona._crs', ['fiona/_crs.c'], **ext_options), Extension('fiona._drivers', ['fiona/_drivers.c'], **ext_options), Extension('fiona._err', ['fiona/_err.c'], **ext_options)] if gdalversion.startswith("1"): log.info("Building Fiona for gdal 1.x: {0}".format(gdalversion)) ext_modules.append( Extension('fiona.ogrext', ['fiona/ogrext1.c'], **ext_options)) else: log.info("Building Fiona for gdal 2.x: {0}".format(gdalversion)) ext_modules.append( Extension('fiona.ogrext', ['fiona/ogrext2.c'], **ext_options)) requirements = [ 'cligj', 'click-plugins', 'six', 'munch'] if sys.version_info < (2, 7): requirements.append('argparse') requirements.append('ordereddict') if sys.version_info < (3, 4): requirements.append('enum34') setup_args = dict( cmdclass={'sdist': sdist_multi_gdal}, metadata_version='1.2', name='Fiona', version=version, requires_python='>=2.6', requires_external='GDAL (>=1.8)', description="Fiona reads and writes spatial data files", license='BSD', keywords='gis vector feature data', author='Sean Gillies', author_email='sean.gillies@gmail.com', maintainer='Sean Gillies', maintainer_email='sean.gillies@gmail.com', url='http://github.com/Toblerity/Fiona', long_description=readme + "\n" + changes + "\n" + credits, package_dir={'': '.'}, packages=['fiona', 'fiona.fio'], entry_points=''' [console_scripts] fio=fiona.fio.main:main_group [fiona.fio_commands] bounds=fiona.fio.bounds:bounds calc=fiona.fio.calc:calc cat=fiona.fio.cat:cat collect=fiona.fio.collect:collect distrib=fiona.fio.distrib:distrib dump=fiona.fio.dump:dump env=fiona.fio.env:env filter=fiona.fio.filter:filter info=fiona.fio.info:info insp=fiona.fio.insp:insp load=fiona.fio.load:load ls=fiona.fio.ls:ls ''', install_requires=requirements, extras_require={ 'calc': ['shapely'], 'test': ['nose']}, 
tests_require=['nose'], test_suite='nose.collector', ext_modules=ext_modules, classifiers=[ 'Development Status :: 5 - Production/Stable', 'Intended Audience :: Developers', 'Intended Audience :: Science/Research', 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 3', 'Topic :: Scientific/Engineering :: GIS']) if os.environ.get('PACKAGE_DATA'): setup_args['package_data'] = {'fiona': ['gdal_data/*', 'proj_data/*']} setup(**setup_args) Fiona-1.7.10/tests/000077500000000000000000000000001317446052300140035ustar00rootroot00000000000000Fiona-1.7.10/tests/__init__.py000066400000000000000000000025331317446052300161170ustar00rootroot00000000000000import os DATADIR = os.path.abspath('tests/data') FILES = ['coutwildrnp.shp', 'coutwildrnp.shx', 'coutwildrnp.dbf', 'coutwildrnp.prj'] def create_zipfile(zipfilename): import zipfile with zipfile.ZipFile(zipfilename, 'w') as zip: for filename in FILES: zip.write(os.path.join(DATADIR, filename), filename) def create_tarfile(tarfilename): import tarfile with tarfile.open(tarfilename, 'w') as tar: for filename in FILES: tar.add(os.path.join(DATADIR, filename), arcname='testing/%s' % filename) def create_jsonfile(jsonfilename): import json import fiona with fiona.open(os.path.join(DATADIR, FILES[0]), 'r') as source: features = [feat for feat in source] my_layer = {'type': 'FeatureCollection', 'features': features} with open(jsonfilename, 'w') as f: f.write(json.dumps(my_layer)) def setup(): """Setup function for nosetests to create test files if they do not exist """ zipfile = os.path.join(DATADIR, 'coutwildrnp.zip') tarfile = os.path.join(DATADIR, 'coutwildrnp.tar') jsonfile = os.path.join(DATADIR, 'coutwildrnp.json') if not os.path.exists(zipfile): create_zipfile(zipfile) if not os.path.exists(tarfile): create_tarfile(tarfile) if not os.path.exists(jsonfile): create_jsonfile(jsonfile) 
Fiona-1.7.10/tests/data/000077500000000000000000000000001317446052300147145ustar00rootroot00000000000000Fiona-1.7.10/tests/data/LICENSE.txt000066400000000000000000000003231317446052300165350ustar00rootroot00000000000000The coutwildrnp shapefile and all .txt files are extracts from the US National Map's 1:2M scale Wilderness Area boundaries [1] and are in the public domain. [1] http://nationalmap.gov/small_scale/atlasftp.html Fiona-1.7.10/tests/data/collection-pp.txt000066400000000000000000000355011317446052300202310ustar00rootroot00000000000000{ "type": "FeatureCollection", "features": [ { "geometry": { "type": "Polygon", "coordinates": [ [ [ -111.73527526855469, 41.995094299316406 ], [ -111.65931701660156, 41.99627685546875 ], [ -111.6587142944336, 41.9921875 ], [ -111.65888977050781, 41.95676803588867 ], [ -111.67082977294922, 41.91230010986328 ], [ -111.67332458496094, 41.905494689941406 ], [ -111.67088317871094, 41.90049362182617 ], [ -111.66474914550781, 41.893211364746094 ], [ -111.6506576538086, 41.875465393066406 ], [ -111.64759826660156, 41.87091827392578 ], [ -111.64640808105469, 41.86273956298828 ], [ -111.64334869384766, 41.858192443847656 ], [ -111.63720703125, 41.85499572753906 ], [ -111.633544921875, 41.847267150878906 ], [ -111.63053894042969, 41.83409118652344 ], [ -111.6330337524414, 41.82728576660156 ], [ -111.63983154296875, 41.8227653503418 ], [ -111.6484603881836, 41.82188034057617 ], [ -111.66077423095703, 41.82327651977539 ], [ -111.6712417602539, 41.82330322265625 ], [ -111.67618560791016, 41.82013702392578 ], [ -111.68803405761719, 41.78792953491211 ], [ -111.69361114501953, 41.77931594848633 ], [ -111.70162200927734, 41.77797317504883 ], [ -111.70901489257812, 41.77663040161133 ], [ -111.71395111083984, 41.772098541259766 ], [ -111.71891784667969, 41.763031005859375 ], [ -111.72816467285156, 41.75851058959961 ], [ -111.74726104736328, 41.75537109375 ], [ -111.75650024414062, 41.752662658691406 ], [ -111.77067565917969, 41.7445182800293 ], 
[ -111.77064514160156, 41.75495910644531 ], [ -111.75585174560547, 41.76219940185547 ], [ -111.7330551147461, 41.766693115234375 ], [ -111.72749328613281, 41.77212905883789 ], [ -111.71883392333984, 41.7834587097168 ], [ -111.71080780029297, 41.78889083862305 ], [ -111.70340728759766, 41.79250717163086 ], [ -111.70030212402344, 41.798404693603516 ], [ -111.70210266113281, 41.8088493347168 ], [ -111.70760345458984, 41.819759368896484 ], [ -111.71312713623047, 41.82340621948242 ], [ -111.71929168701172, 41.82341766357422 ], [ -111.72545623779297, 41.8225212097168 ], [ -111.7341537475586, 41.803016662597656 ], [ -111.740966796875, 41.79213333129883 ], [ -111.74531555175781, 41.78215408325195 ], [ -111.77122497558594, 41.7658576965332 ], [ -111.77056884765625, 41.77811813354492 ], [ -111.7662582397461, 41.778106689453125 ], [ -111.76746368408203, 41.78628158569336 ], [ -111.76253509521484, 41.78627395629883 ], [ -111.76241302490234, 41.82259750366211 ], [ -111.77104187011719, 41.8221549987793 ], [ -111.77161407470703, 41.83351135253906 ], [ -111.7333755493164, 41.84524154663086 ], [ -111.73274993896484, 41.847511291503906 ], [ -111.7376708984375, 41.84979248046875 ], [ -111.77157592773438, 41.845767974853516 ], [ -111.77215576171875, 41.85802459716797 ], [ -111.75243377685547, 41.85844802856445 ], [ -111.72467803955078, 41.86384201049805 ], [ -111.71109771728516, 41.868804931640625 ], [ -111.70182037353516, 41.87604904174805 ], [ -111.69624328613281, 41.88193893432617 ], [ -111.69497680664062, 41.88874816894531 ], [ -111.70053100585938, 41.89057540893555 ], [ -111.70793151855469, 41.88923263549805 ], [ -111.72091674804688, 41.87972640991211 ], [ -111.73388671875, 41.87384796142578 ], [ -111.75301361083984, 41.86888885498047 ], [ -111.75350952148438, 41.90249252319336 ], [ -111.74364471435547, 41.90247344970703 ], [ -111.74463653564453, 41.967864990234375 ], [ -111.7119369506836, 41.96416473388672 ], [ -111.69283294677734, 41.95912551879883 ], [ -111.68911743164062, 
41.96047592163086 ], [ -111.6891098022461, 41.96320343017578 ], [ -111.69341278076172, 41.96684646606445 ], [ -111.70449829101562, 41.972320556640625 ], [ -111.7341079711914, 41.97828674316406 ], [ -111.73527526855469, 41.995094299316406 ] ] ] }, "type": "Feature", "id": "0", "properties": { "PERIMETER": 1.22107, "FEATURE2": null, "NAME": "Mount Naomi Wilderness", "FEATURE1": "Wilderness", "URL": "http://www.wilderness.net/index.cfm?fuse=NWPS&sec=wildView&wname=Mount%20Naomi", "AGBUR": "FS", "AREA": 0.0179264, "STATE_FIPS": "49", "WILDRNP020": 332, "STATE": "UT" } }, { "geometry": { "type": "Polygon", "coordinates": [ [ [ -112.00384521484375, 41.552703857421875 ], [ -112.00446319580078, 41.56586456298828 ], [ -112.0112075805664, 41.56586456298828 ], [ -112.01121520996094, 41.57902526855469 ], [ -112.01734924316406, 41.57902526855469 ], [ -112.0173568725586, 41.594459533691406 ], [ -112.02779388427734, 41.5940055847168 ], [ -112.02779388427734, 41.60171890258789 ], [ -112.03945922851562, 41.60126495361328 ], [ -112.04007720947266, 41.608524322509766 ], [ -112.04744720458984, 41.608524322509766 ], [ -112.0474624633789, 41.62804412841797 ], [ -112.05974578857422, 41.62758255004883 ], [ -112.05975341796875, 41.640296936035156 ], [ -112.050537109375, 41.64030075073242 ], [ -112.05054473876953, 41.64983367919922 ], [ -112.04132843017578, 41.64983367919922 ], [ -112.04195404052734, 41.66299819946289 ], [ -112.05793762207031, 41.662540435791016 ], [ -112.0579605102539, 41.692047119140625 ], [ -112.07394409179688, 41.692039489746094 ], [ -112.07459259033203, 41.72381591796875 ], [ -112.06167602539062, 41.72382354736328 ], [ -112.0616683959961, 41.71383285522461 ], [ -112.05490112304688, 41.713836669921875 ], [ -112.04137420654297, 41.71384048461914 ], [ -112.04138946533203, 41.7379035949707 ], [ -112.0376968383789, 41.74108123779297 ], [ -112.03339385986328, 41.741085052490234 ], [ -112.02908325195312, 41.729736328125 ], [ -112.02599334716797, 41.71657180786133 ], [ 
-112.0241470336914, 41.71157455444336 ], [ -112.0272216796875, 41.704769134521484 ], [ -112.02413940429688, 41.70068359375 ], [ -112.01676177978516, 41.69977951049805 ], [ -112.01615142822266, 41.7070426940918 ], [ -112.00508117675781, 41.707496643066406 ], [ -112.00508117675781, 41.66618347167969 ], [ -111.9792709350586, 41.6666374206543 ], [ -111.9786605834961, 41.653926849365234 ], [ -111.96821594238281, 41.65346908569336 ], [ -111.96760559082031, 41.6407585144043 ], [ -111.96146392822266, 41.6407585144043 ], [ -111.96025085449219, 41.61125183105469 ], [ -111.95042419433594, 41.61124801635742 ], [ -111.94796752929688, 41.60988235473633 ], [ -111.94735717773438, 41.60761260986328 ], [ -111.9522705078125, 41.60443878173828 ], [ -111.96455383300781, 41.60262680053711 ], [ -111.9682388305664, 41.60398864746094 ], [ -111.9725341796875, 41.60807418823242 ], [ -111.97560119628906, 41.60943603515625 ], [ -111.97928619384766, 41.61034393310547 ], [ -111.98542785644531, 41.609439849853516 ], [ -111.98481750488281, 41.58356475830078 ], [ -111.97868347167969, 41.58356857299805 ], [ -111.97745513916016, 41.570404052734375 ], [ -111.97132110595703, 41.57085418701172 ], [ -111.97132110595703, 41.56450271606445 ], [ -111.98297882080078, 41.564048767089844 ], [ -111.98175811767578, 41.54090118408203 ], [ -111.98176574707031, 41.53545379638672 ], [ -112.00323486328125, 41.53545379638672 ], [ -112.00384521484375, 41.552703857421875 ] ] ] }, "type": "Feature", "id": "1", "properties": { "PERIMETER": 0.755827, "FEATURE2": null, "NAME": "Wellsville Mountain Wilderness", "FEATURE1": "Wilderness", "URL": "http://www.wilderness.net/index.cfm?fuse=NWPS&sec=wildView&wname=Wellsville%20Mountain", "AGBUR": "FS", "AREA": 0.0104441, "STATE_FIPS": "49", "WILDRNP020": 336, "STATE": "UT" } } ] } Fiona-1.7.10/tests/data/collection.txt000066400000000000000000000154051317446052300176150ustar00rootroot00000000000000{"type": "FeatureCollection", "features": [{"geometry": {"type": "Polygon", 
"coordinates": [[[-111.73527526855469, 41.995094299316406], [-111.65931701660156, 41.99627685546875], [-111.6587142944336, 41.9921875], [-111.65888977050781, 41.95676803588867], [-111.67082977294922, 41.91230010986328], [-111.67332458496094, 41.905494689941406], [-111.67088317871094, 41.90049362182617], [-111.66474914550781, 41.893211364746094], [-111.6506576538086, 41.875465393066406], [-111.64759826660156, 41.87091827392578], [-111.64640808105469, 41.86273956298828], [-111.64334869384766, 41.858192443847656], [-111.63720703125, 41.85499572753906], [-111.633544921875, 41.847267150878906], [-111.63053894042969, 41.83409118652344], [-111.6330337524414, 41.82728576660156], [-111.63983154296875, 41.8227653503418], [-111.6484603881836, 41.82188034057617], [-111.66077423095703, 41.82327651977539], [-111.6712417602539, 41.82330322265625], [-111.67618560791016, 41.82013702392578], [-111.68803405761719, 41.78792953491211], [-111.69361114501953, 41.77931594848633], [-111.70162200927734, 41.77797317504883], [-111.70901489257812, 41.77663040161133], [-111.71395111083984, 41.772098541259766], [-111.71891784667969, 41.763031005859375], [-111.72816467285156, 41.75851058959961], [-111.74726104736328, 41.75537109375], [-111.75650024414062, 41.752662658691406], [-111.77067565917969, 41.7445182800293], [-111.77064514160156, 41.75495910644531], [-111.75585174560547, 41.76219940185547], [-111.7330551147461, 41.766693115234375], [-111.72749328613281, 41.77212905883789], [-111.71883392333984, 41.7834587097168], [-111.71080780029297, 41.78889083862305], [-111.70340728759766, 41.79250717163086], [-111.70030212402344, 41.798404693603516], [-111.70210266113281, 41.8088493347168], [-111.70760345458984, 41.819759368896484], [-111.71312713623047, 41.82340621948242], [-111.71929168701172, 41.82341766357422], [-111.72545623779297, 41.8225212097168], [-111.7341537475586, 41.803016662597656], [-111.740966796875, 41.79213333129883], [-111.74531555175781, 41.78215408325195], [-111.77122497558594, 
41.7658576965332], [-111.77056884765625, 41.77811813354492], [-111.7662582397461, 41.778106689453125], [-111.76746368408203, 41.78628158569336], [-111.76253509521484, 41.78627395629883], [-111.76241302490234, 41.82259750366211], [-111.77104187011719, 41.8221549987793], [-111.77161407470703, 41.83351135253906], [-111.7333755493164, 41.84524154663086], [-111.73274993896484, 41.847511291503906], [-111.7376708984375, 41.84979248046875], [-111.77157592773438, 41.845767974853516], [-111.77215576171875, 41.85802459716797], [-111.75243377685547, 41.85844802856445], [-111.72467803955078, 41.86384201049805], [-111.71109771728516, 41.868804931640625], [-111.70182037353516, 41.87604904174805], [-111.69624328613281, 41.88193893432617], [-111.69497680664062, 41.88874816894531], [-111.70053100585938, 41.89057540893555], [-111.70793151855469, 41.88923263549805], [-111.72091674804688, 41.87972640991211], [-111.73388671875, 41.87384796142578], [-111.75301361083984, 41.86888885498047], [-111.75350952148438, 41.90249252319336], [-111.74364471435547, 41.90247344970703], [-111.74463653564453, 41.967864990234375], [-111.7119369506836, 41.96416473388672], [-111.69283294677734, 41.95912551879883], [-111.68911743164062, 41.96047592163086], [-111.6891098022461, 41.96320343017578], [-111.69341278076172, 41.96684646606445], [-111.70449829101562, 41.972320556640625], [-111.7341079711914, 41.97828674316406], [-111.73527526855469, 41.995094299316406]]]}, "type": "Feature", "id": "0", "properties": {"PERIMETER": 1.22107, "FEATURE2": null, "NAME": "Mount Naomi Wilderness", "FEATURE1": "Wilderness", "URL": "http://www.wilderness.net/index.cfm?fuse=NWPS&sec=wildView&wname=Mount%20Naomi", "AGBUR": "FS", "AREA": 0.0179264, "STATE_FIPS": "49", "WILDRNP020": 332, "STATE": "UT"}}, {"geometry": {"type": "Polygon", "coordinates": [[[-112.00384521484375, 41.552703857421875], [-112.00446319580078, 41.56586456298828], [-112.0112075805664, 41.56586456298828], [-112.01121520996094, 41.57902526855469], 
[-112.01734924316406, 41.57902526855469], [-112.0173568725586, 41.594459533691406], [-112.02779388427734, 41.5940055847168], [-112.02779388427734, 41.60171890258789], [-112.03945922851562, 41.60126495361328], [-112.04007720947266, 41.608524322509766], [-112.04744720458984, 41.608524322509766], [-112.0474624633789, 41.62804412841797], [-112.05974578857422, 41.62758255004883], [-112.05975341796875, 41.640296936035156], [-112.050537109375, 41.64030075073242], [-112.05054473876953, 41.64983367919922], [-112.04132843017578, 41.64983367919922], [-112.04195404052734, 41.66299819946289], [-112.05793762207031, 41.662540435791016], [-112.0579605102539, 41.692047119140625], [-112.07394409179688, 41.692039489746094], [-112.07459259033203, 41.72381591796875], [-112.06167602539062, 41.72382354736328], [-112.0616683959961, 41.71383285522461], [-112.05490112304688, 41.713836669921875], [-112.04137420654297, 41.71384048461914], [-112.04138946533203, 41.7379035949707], [-112.0376968383789, 41.74108123779297], [-112.03339385986328, 41.741085052490234], [-112.02908325195312, 41.729736328125], [-112.02599334716797, 41.71657180786133], [-112.0241470336914, 41.71157455444336], [-112.0272216796875, 41.704769134521484], [-112.02413940429688, 41.70068359375], [-112.01676177978516, 41.69977951049805], [-112.01615142822266, 41.7070426940918], [-112.00508117675781, 41.707496643066406], [-112.00508117675781, 41.66618347167969], [-111.9792709350586, 41.6666374206543], [-111.9786605834961, 41.653926849365234], [-111.96821594238281, 41.65346908569336], [-111.96760559082031, 41.6407585144043], [-111.96146392822266, 41.6407585144043], [-111.96025085449219, 41.61125183105469], [-111.95042419433594, 41.61124801635742], [-111.94796752929688, 41.60988235473633], [-111.94735717773438, 41.60761260986328], [-111.9522705078125, 41.60443878173828], [-111.96455383300781, 41.60262680053711], [-111.9682388305664, 41.60398864746094], [-111.9725341796875, 41.60807418823242], [-111.97560119628906, 
41.60943603515625], [-111.97928619384766, 41.61034393310547], [-111.98542785644531, 41.609439849853516], [-111.98481750488281, 41.58356475830078], [-111.97868347167969, 41.58356857299805], [-111.97745513916016, 41.570404052734375], [-111.97132110595703, 41.57085418701172], [-111.97132110595703, 41.56450271606445], [-111.98297882080078, 41.564048767089844], [-111.98175811767578, 41.54090118408203], [-111.98176574707031, 41.53545379638672], [-112.00323486328125, 41.53545379638672], [-112.00384521484375, 41.552703857421875]]]}, "type": "Feature", "id": "1", "properties": {"PERIMETER": 0.755827, "FEATURE2": null, "NAME": "Wellsville Mountain Wilderness", "FEATURE1": "Wilderness", "URL": "http://www.wilderness.net/index.cfm?fuse=NWPS&sec=wildView&wname=Wellsville%20Mountain", "AGBUR": "FS", "AREA": 0.0104441, "STATE_FIPS": "49", "WILDRNP020": 336, "STATE": "UT"}}]}Fiona-1.7.10/tests/data/coutwildrnp.dbf000066400000000000000000001243411317446052300177500ustar00rootroot00000000000000_CaPERIMETERNFEATURE2CPNAMECPFEATURE1CPURLCeAGBURCPAREANSTATE_FIPSCPWILDRNP020N STATECP 1.221070000000000 Mount Naomi Wilderness Wilderness http://www.wilderness.net/index.cfm?fuse=NWPS&sec=wildView&wname=Mount%20Naomi FS 0.01792640000000049 332UT 0.755827000000000 Wellsville Mountain Wilderness Wilderness http://www.wilderness.net/index.cfm?fuse=NWPS&sec=wildView&wname=Wellsville%20Mountain FS 0.01044410000000049 336UT 1.708510000000000 Mount Zirkel Wilderness Wilderness http://www.wilderness.net/index.cfm?fuse=NWPS&sec=wildView&wname=Mount%20Zirkel FS 0.07149550000000008 357CO 2.232410000000000 High Uintas Wilderness Wilderness http://www.wilderness.net/index.cfm?fuse=NWPS&sec=wildView&wname=High%20Uintas FS 0.18291900000000049 358UT 1.054580000000000 Rawah Wilderness Wilderness http://www.wilderness.net/index.cfm?fuse=NWPS&sec=wildView&wname=Rawah FS 0.03373710000000008 359CO 0.418340000000000 Mount Olympus Wilderness Wilderness 
http://www.wilderness.net/index.cfm?fuse=NWPS&sec=wildView&wname=Mount%20Olympus FS 0.00633137000000049 364UT 1.760390000000000 Comanche Peak Wilderness Wilderness http://www.wilderness.net/index.cfm?fuse=NWPS&sec=wildView&wname=Comanche%20Peak FS 0.03197700000000008 365CO 0.462863000000000 Cache La Poudre Wilderness Wilderness http://www.wilderness.net/index.cfm?fuse=NWPS&sec=wildView&wname=Cache%20La%20Poudre FS 0.00481977000000008 366CO 0.315219000000000 Twin Peaks Wilderness Wilderness http://www.wilderness.net/index.cfm?fuse=NWPS&sec=wildView&wname=Twin%20Peaks FS 0.00477962000000049 367UT 0.329520000000000 Neota Wilderness Wilderness http://www.wilderness.net/index.cfm?fuse=NWPS&sec=wildView&wname=Neota FS 0.00576742000000008 369CO 0.518395000000000 Lone Peak Wilderness Wilderness http://www.wilderness.net/index.cfm?fuse=NWPS&sec=wildView&wname=Lone%20Peak FS 0.01251300000000049 371UT 0.477348000000000 Deseret Peak Wilderness Wilderness http://www.wilderness.net/index.cfm?fuse=NWPS&sec=wildView&wname=Deseret%20Peak FS 0.01077180000000049 373UT 0.675146000000000 Never Summer Wilderness Wilderness http://www.wilderness.net/index.cfm?fuse=NWPS&sec=wildView&wname=Never%20Summer FS 0.00908863000000008 374CO 0.288683000000000 Mount Timpanogos Wilderness Wilderness http://www.wilderness.net/index.cfm?fuse=NWPS&sec=wildView&wname=Mount%20Timpanogos FS 0.00442921000000049 375UT 0.768802000000000 Sarvis Creek Wilderness Wilderness http://www.wilderness.net/index.cfm?fuse=NWPS&sec=wildView&wname=Sarvis%20Creek FS 0.01957160000000008 376CO 1.372940000000000 Indian Peaks Wilderness Wilderness http://www.wilderness.net/index.cfm?fuse=NWPS&sec=wildView&wname=Indian%20Peaks FS 0.03140190000000008 378CO 2.029470000000000 Flat Tops Wilderness Wilderness http://www.wilderness.net/index.cfm?fuse=NWPS&sec=wildView&wname=Flat%20Tops FS 0.10322100000000008 380CO 0.765491000000000 James Peak Wilderness Wilderness 
http://www.wilderness.net/index.cfm?fuse=NWPS&sec=wildView&wname=James%20Peak FS 0.00676706000000008 384CO 0.726088000000000 Mount Nebo Wilderness Wilderness http://www.wilderness.net/index.cfm?fuse=NWPS&sec=wildView&wname=Mount%20Nebo FS 0.01203290000000049 385UT 0.376165000000000 Byers Peak Wilderness Wilderness http://www.wilderness.net/index.cfm?fuse=NWPS&sec=wildView&wname=Byers%20Peak FS 0.00345459000000008 386CO 0.528667000000000 Vasquez Peak Wilderness Wilderness http://www.wilderness.net/index.cfm?fuse=NWPS&sec=wildView&wname=Vasquez%20Peak FS 0.00542539000000008 387CO 1.257970000000000 Eagles Nest Wilderness Wilderness http://www.wilderness.net/index.cfm?fuse=NWPS&sec=wildView&wname=Eagles%20Nest FS 0.05923840000000008 388CO 0.522076000000000 Ptarmigan Peak Wilderness Wilderness http://www.wilderness.net/index.cfm?fuse=NWPS&sec=wildView&wname=Ptarmigan%20Peak FS 0.00574553000000008 389CO 1.078160000000000 Mount Evans Wilderness Wilderness http://www.wilderness.net/index.cfm?fuse=NWPS&sec=wildView&wname=Mount%20Evans FS 0.03254480000000008 390CO 1.438710000000000 Holy Cross Wilderness Wilderness http://www.wilderness.net/index.cfm?fuse=NWPS&sec=wildView&wname=Holy%20Cross FS 0.05451810000000008 391CO 1.463510000000000 Lost Creek Wilderness Wilderness http://www.wilderness.net/index.cfm?fuse=NWPS&sec=wildView&wname=Lost%20Creek FS 0.04967140000000008 396CO 1.063300000000000 Hunter-Fryingpan Wilderness Wilderness http://www.wilderness.net/index.cfm?fuse=NWPS&sec=wildView&wname=Hunter%2DFryingpan FS 0.03224480000000008 398CO 1.458040000000000 Maroon Bells-Snowmass Wilderness Wilderness http://www.wilderness.net/index.cfm?fuse=NWPS&sec=wildView&wname=Maroon%20Bells%2DSnowmass FS 0.07808400000000008 399CO 0.738527000000000 Mount Massive Wilderness Wilderness http://www.wilderness.net/index.cfm?fuse=NWPS&sec=wildView&wname=Mount%20Massive FS 0.01047520000000008 400CO 0.193332000000000 Mount Massive Wilderness Wilderness 
http://www.wilderness.net/index.cfm?fuse=NWPS&sec=wildView&wname=Mount%20Massive FWS 0.00093778200000008 401CO 0.820306000000000 Buffalo Peaks Wilderness Wilderness http://www.wilderness.net/index.cfm?fuse=NWPS&sec=wildView&wname=Buffalo%20Peaks FS 0.01711430000000008 404CO 2.025460000000000 Collegiate Peaks Wilderness Wilderness http://www.wilderness.net/index.cfm?fuse=NWPS&sec=wildView&wname=Collegiate%20Peaks FS 0.07376710000000008 405CO 0.907013000000000 Raggeds Wilderness Wilderness http://www.wilderness.net/index.cfm?fuse=NWPS&sec=wildView&wname=Raggeds FS 0.02990640000000008 406CO 1.644000000000000 West Elk Wilderness Wilderness http://www.wilderness.net/index.cfm?fuse=NWPS&sec=wildView&wname=West%20Elk FS 0.07623760000000008 415CO 0.538332000000000 Fossil Ridge Wilderness Wilderness http://www.wilderness.net/index.cfm?fuse=NWPS&sec=wildView&wname=Fossil%20Ridge FS 0.01317560000000008 419CO 0.826888000000000 Gunnison Gorge Wilderness Wilderness http://www.wilderness.net/index.cfm?fuse=NWPS&sec=wildView&wname=Gunnison%20Gorge BLM 0.00739996000000008 420CO 0.707377000000000 Black Canyon of the Gunnison Wilderness Wilderness http://www.wilderness.net/index.cfm?fuse=NWPS&sec=wildView&wname=Black%20Canyon%20of%20the%20GunnisonNPS 0.00663470000000008 425CO 0.735176000000000 Sangre de Cristo Wilderness Wilderness http://www.wilderness.net/index.cfm?fuse=NWPS&sec=wildView&wname=Sangre%20de%20Cristo FS 0.01336290000000008 427CO 0.393427000000000 Sangre de Cristo Wilderness Wilderness http://www.wilderness.net/index.cfm?fuse=NWPS&sec=wildView&wname=Sangre%20de%20Cristo FS 0.00685532000000008 433CO 0.829067000000000 Powderhorn Wilderness Wilderness http://www.wilderness.net/index.cfm?fuse=NWPS&sec=wildView&wname=Powderhorn BLM 0.01925610000000008 438CO 0.446917000000000 Sangre de Cristo Wilderness Wilderness http://www.wilderness.net/index.cfm?fuse=NWPS&sec=wildView&wname=Sangre%20de%20Cristo FS 0.00708528000000008 440CO 1.224290000000000 Uncompahgre Wilderness 
Wilderness http://www.wilderness.net/index.cfm?fuse=NWPS&sec=wildView&wname=Uncompahgre FS 0.04125950000000008 442CO 0.047141800000000 Uncompahgre Wilderness Wilderness http://www.wilderness.net/index.cfm?fuse=NWPS&sec=wildView&wname=Uncompahgre BLM 0.00013151100000008 444CO 0.349875000000000 Powderhorn Wilderness Wilderness http://www.wilderness.net/index.cfm?fuse=NWPS&sec=wildView&wname=Powderhorn FS 0.00614416000000008 446CO 0.733543000000000 Sangre de Cristo Wilderness Wilderness http://www.wilderness.net/index.cfm?fuse=NWPS&sec=wildView&wname=Sangre%20de%20Cristo FS 0.00914042000000008 447CO 0.065853400000000 Uncompahgre Wilderness Wilderness http://www.wilderness.net/index.cfm?fuse=NWPS&sec=wildView&wname=Uncompahgre BLM 0.00026714500000008 449CO 1.616820000000000 La Garita Wilderness Wilderness http://www.wilderness.net/index.cfm?fuse=NWPS&sec=wildView&wname=La%20Garita FS 0.05340450000000008 451CO 0.407763000000000 Mount Sneffels Wilderness Wilderness http://www.wilderness.net/index.cfm?fuse=NWPS&sec=wildView&wname=Mount%20Sneffels FS 0.00702802000000008 452CO 0.196611000000000 Uncompahgre Wilderness Wilderness http://www.wilderness.net/index.cfm?fuse=NWPS&sec=wildView&wname=Uncompahgre BLM 0.00150059000000008 454CO 0.860610000000000 Box-Death Hollow Wilderness Wilderness http://www.wilderness.net/index.cfm?fuse=NWPS&sec=wildView&wname=Box%2DDeath%20Hollow FS 0.00997307000000049 455UT 0.779127000000000 Sangre de Cristo Wilderness Wilderness http://www.wilderness.net/index.cfm?fuse=NWPS&sec=wildView&wname=Sangre%20de%20Cristo NPS 0.01755200000000008 458CO 0.561821000000000 Greenhorn Mountain Wilderness Wilderness http://www.wilderness.net/index.cfm?fuse=NWPS&sec=wildView&wname=Greenhorn%20Mountain FS 0.00975499000000008 461CO 0.171344000000000 Sangre de Cristo Wilderness Wilderness http://www.wilderness.net/index.cfm?fuse=NWPS&sec=wildView&wname=Sangre%20de%20Cristo FS 0.00099257300000008 462CO 0.697435000000000 Lizard Head Wilderness Wilderness 
http://www.wilderness.net/index.cfm?fuse=NWPS&sec=wildView&wname=Lizard%20Head FS 0.01764510000000008 465CO 1.742490000000000 Dark Canyon Wilderness Wilderness http://www.wilderness.net/index.cfm?fuse=NWPS&sec=wildView&wname=Dark%20Canyon FS 0.02586860000000049 468UT 0.574187000000000 Great Sand Dunes Wilderness Wilderness http://www.wilderness.net/index.cfm?fuse=NWPS&sec=wildView&wname=Great%20Sand%20Dunes NPS 0.01359200000000008 470CO 0.133740000000000 Sangre de Cristo Wilderness Wilderness http://www.wilderness.net/index.cfm?fuse=NWPS&sec=wildView&wname=Sangre%20de%20Cristo FS 0.00093922600000008 471CO 3.301370000000000 Weminuche Wilderness Wilderness http://www.wilderness.net/index.cfm?fuse=NWPS&sec=wildView&wname=Weminuche FS 0.19608500000000008 472CO 0.454338000000000 Sangre de Cristo Wilderness Wilderness http://www.wilderness.net/index.cfm?fuse=NWPS&sec=wildView&wname=Sangre%20de%20Cristo FS 0.00750880000000008 479CO 0.275139000000000 Ashdown Gorge Wilderness Wilderness http://www.wilderness.net/index.cfm?fuse=NWPS&sec=wildView&wname=Ashdown%20Gorge FS 0.00342165000000049 480UT 0.336214000000000 Sangre de Cristo Wilderness Wilderness http://www.wilderness.net/index.cfm?fuse=NWPS&sec=wildView&wname=Sangre%20de%20Cristo FS 0.00394863000000008 482CO 0.191092000000000 Weminuche Wilderness Wilderness http://www.wilderness.net/index.cfm?fuse=NWPS&sec=wildView&wname=Weminuche FS 0.00123684000000008 487CO 0.736247000000000 Pine Valley Mountain Wilderness Wilderness http://www.wilderness.net/index.cfm?fuse=NWPS&sec=wildView&wname=Pine%20Valley%20Mountain FS 0.02126900000000049 499UT 2.032130000000000 South San Juan Wilderness Wilderness http://www.wilderness.net/index.cfm?fuse=NWPS&sec=wildView&wname=South%20San%20Juan FS 0.07043340000000008 504CO 0.263251000000000 Mesa Verde Wilderness Wilderness http://www.wilderness.net/index.cfm?fuse=NWPS&sec=wildView&wname=Mesa%20Verde NPS 0.00218289000000008 509CO 0.119581000000000 Mesa Verde Wilderness Wilderness 
http://www.wilderness.net/index.cfm?fuse=NWPS&sec=wildView&wname=Mesa%20Verde NPS 0.00053934000000008 510CO 0.120627000000000 Mesa Verde Wilderness Wilderness http://www.wilderness.net/index.cfm?fuse=NWPS&sec=wildView&wname=Mesa%20Verde NPS 0.00081771100000008 511CO Fiona-1.7.10/tests/data/coutwildrnp.prj000066400000000000000000000002171317446052300200030ustar00rootroot00000000000000GEOGCS["GCS_WGS_1984",DATUM["D_WGS_1984",SPHEROID["WGS_1984",6378137,298.257223563]],PRIMEM["Greenwich",0],UNIT["Degree",0.017453292519943295]]Fiona-1.7.10/tests/data/coutwildrnp.shp000066400000000000000000003257541317446052300200220ustar00rootroot00000000000000' d\`ԈB@">ZD@k[`LD@Z[D@R[@_D@@2[D@`([D@@+[`wD@[@D@[@D@[`CD@@[TD@`[@D@@r[@zD@^[@nD@,[@D@[pD@[@sD@Z[D@[D@[`PD@`[`3D@ J[ aD@[bD@F[@D@[D@ d[D@`[D@`[hD@`[ D@[D@@[D@ [D@j[@WD@R[`LD@@R[D@_[D@`[#D@@[ D@`[`HD@}[`D@[pD@[ 2D@@[`D@`I[D@[`eD@[eD@m[`HD@`[@D@l[dD@@[D@[[D@Q[`D@` [D@ [D@`[D@`[JD@X[`[`D@@p[D@[D@@[ D@@[@D@[`D@[D@ *[D@ *[AD@ [2D@ [@Z{D@ ߵZ2|D@iZn|D@`Z`{D@ hZ|D@Z@}D@[GD@[`qD@n/[kmD@@[ nlD@[lkD@[fiD@[hD@ં[hD@@[?hD@@u[gD@`[gD@[gD@Y[@gD@R[gD@[gD@`W[@>gD@`[{gD@[$gD@v[reD@o[WbD@[`^D@![,\D@[@yZD@ [YD@[`4WD@n[}VD@ [TD@[`*SD@`[RD@@N[RD@࿕[@QD@@[ -PD@@[`wOD@8[ ND@`[@LD@@[ KD@[ND@ ל["RD@`[`QD@[PD@[QOD@)[@8ND@ [@LD@w[jLD@[LD@Š[LD@`[`LD@ [`LD@`)[ LD@ [2LD@ޡ[@KD@ף[KD@[KD@J[KD@J[LD@[LD@[!KD@][`LD@`[ gLD@[`TKD@`[LD@[ WLD@[KD@ȳ[ID@G[ GD@[GD@[ 'HD@S[`nMD@[ND@M[PD@w[8SD@ D[TD@[#TD@`¶[mTD@@3[@sVD@श[@YD@`[ZD@([\D@`8[]D@ȵ[`^D@`[``D@H[ dD@[eD@@[@TfD@[ egD@>[^gD@1[ gD@[`5hD@`Z[YD@~ZYD@@~Z:ZD@LZZD@`jZ [D@`tZc[D@`jZ`\D@`jZ@~\D@Z\D@@Z ]D@` Z]D@ ЀZ8_D@Z`D@`[Z`D@`tZ)aD@ yZAaD@ ZaD@Z bD@Z`cD@TZ (dD@ZdD@˂Z@`eD@`߂Z ?fD@@ZfD@9Z@gD@tZ@gD@Z VhD@ Z@hD@Z iD@`pZiD@ ZiD@ʅZ 1jD@Z@jD@ZjD@-ZjD@ ZjD@ZkD@Z KkD@ZlD@ Z@lD@`ZmD@ZmD@ ZlD@ZlD@ZqmD@ ZmD@`PZnD@ZnD@ŃZ@mD@`kZ@mD@@‚Z@`nD@6ZnD@`ƀZoD@VZoD@`!~ZoD@|ZoD@@|Z jD@@0|Z jD@`0|Z hD@{Z`fD@ .{Z`eD@SzZbdD@QyZDdD@`QyZ@KbD@yZ@D@]Z@=D@]ZJ=D@k]Z]Z@VD@ ]ZJWD@\Z@WD@\ZXD@9\Z}YD@[ZAYD@[ZWD@ [ZWD@[Z *WD@ `['ID@@[;PD@[6ND@n[ND@@V[,PD@ 
[;PD@J[@OD@ [ND@ "[@MD@>[ LD@[&LD@@[@KD@[@JD@[=JD@[pID@ ['ID@[8ID@ [@&JD@`[JD@[LD@[7ND@[6ND@  xZD@tZ`>D@3tZ >D@`tZD@ vZ>D@{vZ?D@vZ @D@ vZ`@D@@vZAD@wZ BD@@xZBD@ixZBD@ xZBD@ xZ@CD@@xZ;CD@xZ CD@xZCD@xZDD@ UxZED@wZFD@@wZGD@fwZVHD@HwZ HD@ wZID@ p@[v6D@w[FD@+@D[FD@[OFD@[`"FD@ [FD@G[FD@[FD@M[FD@@[FD@`[xFD@[ED@[`DD@[cCD@a[BD@`[@BD@[`BD@@j[BD@`[H=D@w[@;D@@[1;D@p[99D@[7D@@[7D@[@8D@ [8D@[7D@@[ 7D@ [@6D@[v6D@[ 7D@[7D@3[ 8D@(["9D@ 2[@:D@0[ =D@0[ \>D@@ [k>D@m[?D@[.?D@[@D@@[DD@@[DD@[7ED@@D[FD@ 0@+\1D@ $\`CD@#+\@D@'+\@D@+\BD@a)\MBD@@'\nBD@`K'\CD@ &\`CD@5&\`(BD@ &\ AD@&\`{?D@&\>D@&\a>D@&\@6>D@`%\@=D@ %\=D@ %\@;D@ %\)9D@%\`l7D@`%\4D@ $\`^3D@ $\2D@^%\@2D@%\22D@#&\ 2D@&\2D@`9'\2D@k(\v2D@)\1D@`y)\`4D@*\ 4D@*\@7D@@+\@7D@`+\ 9D@+\)9D@+\@D@ 0Zm'D@ wZ mZ0D@ mZD@@mZD@mZKD@ mZD@GmZD@lZD@lZD@lZ~D@mmZ mD@mZfD@NnZ F D@nZ@ D@@oZ D@oZ D@oZ D@`oZ D@oZ D@@oZ@ D@aoZ D@%oZ` D@oZ D@ %oZ D@}oZ D@oZD@oZD@UpZ`D@7qZD@ AqZD@rZD@rZD@UsZ D@qsZ`D@`sZD@rZ`<D@@rZ}D@`qZ~D@@aqZ@oD@"qZ@YD@ qZ8D@pZD@ pZ`)D@oZjD@soZ xD@oZ`D@nZD@`nZ@D@DnZ@D@ (nZpD@nZHD@ nZD@mZD@ mZ`D@mZ D@@,mZ@*D@0mZ`D@`mZ D@lZ@D@lZD@dlZ)D@kZD@jZKD@jZ`D@|jZD@iZwD@ [iZ{D@@$iZPD@@hZ@D@LhZ@D@gZD@NgZD@@*gZD@fZhD@fZ>D@fZDD@ eZ |D@eZD@ eZ@D@dZ .D@`dZD@dZD@@dZiD@@^dZ@D@@cZD@cZxD@5cZD@bZ9D@bZbD@@bZ`8D@bcZ D@cZD@`cZjD@cZD@@cZD@bZ ND@bZD@[cZD@@cZD@@dZD@4eZD@feZD@ feZhD@ReZ-D@ eZ@D@dZMD@`dZ@D@`dZ$D@dZ@ D@`dZ@ D@@dZ D@_dZw D@ cZ` D@cZ@z D@@cZ0 D@dZ D@kdZ`C D@eZ& D@WeZ` D@keZ@ D@leZ`D@eZ`.D@ eZD@fZoD@ciZD@FiZJD@ 3iZ`D@ hZD@ hZ D@ iZPD@yiZD@ iZ 7D@@iZ vD@@iZ:D@ziZ`D@ fZ D@`fZD@zdZ D@|dZ`D@dZC@ eZC@fZ`oC@VfZ`4C@fZ4C@fZsC@gZ@C@@igZ2C@gZ}C@@AhZC@@hZC@@hZ`C@ iZDC@iZC@ hZ`C@`hZ C@@hZhC@`9hZwC@!hZEC@gZ C@gZ@C@`VgZ`C@ CgZjC@gZC@ gZC@`gZC@fZC@`fZC@@fZ`xC@@fZ@gC@@fZ XC@fZ@&C@gZ C@`zgZC@@XgZ@OC@XgZ@C@XgZC@gZeC@gZ*C@2hZ@C@vhZC@hZC@hZC@iZ>C@ C@ IZC@GZC@dZC@ZC@ZC@@ZC@ZC@`Z@C@ ZqC@`fZ`RC@Z`C@\ZC@ZC@ZC@Z C@ ZC@@$Z`(C@3Z C@=Z@C@ dZ`C@mZ1C@ EZC@ZUC@`Z C@ nZ@C@ZC@@Z`#C@/Z@C@-Z FC@Z9C@@@ZC@ZC@ ZC@@ZuC@@Z C@&ZhC@MZ@JC@`ZC@@Z"C@`Z@C@;ZC@@XZC@Z`C@`Z`C@ZVC@KZC@ZyC@@`Z`C@ ZC@ Z&C@`>Z 
C@`Z`"C@Z C@hZ NC@@ZC@`ZpC@ ZvC@Z@C@ZC@Z@hC@jZ9C@`ZC@ZC@ZC@Z PC@`Z@C@[Z`Z`/C@Z`C@aZC@ Z7C@ Z(C@XZ@ C@tZC@ZTC@LZC@1Z C@EZ>C@ZC@ZC@Z C@ZqC@@ZC@ Z`C@Z`C@Z C@`Z`8C@@ZC@Z~C@@|ZC@`3ZKC@`ZPC@ZC@@Z9C@Z C@fZC@HZC@Z@4C@ZtC@/ZhC@Z`C@`Z@7C@Z@C@Z C@@HZC@Z C@Z`C@`OZC@ ZRC@\ZC@ #Z@qC@`8Z@C@yZtC@fZC@ZC@ZHD@Z 7D@MZsD@Z D@[ QC@ >[C@`=[@C@ F[C@[C@@@rZC@{ZC@E{Z`OC@'|ZC@S|ZC@ /|ZC@"|ZC@ |Z%C@|Z@C@@}Z`C@}Z`RC@}Z@!C@ }Z /C@}ZC@@*}Z@C@ }ZGC@@$}ZC@[}ZC@b}ZC@S}ZC@|ZC@.}Z`C@@@}ZC@ }Z`C@}ZC@Y~Z`4C@[~Z aC@}Z`C@@}Z`"C@ G~Z&C@`~ZyC@~Z`C@ \~Z@#C@]~Z AC@~Z#C@kZC@`Z C@`Z`[C@ZEC@Z`C@ Z C@\Z C@ZC@UZ`8C@`XZC@ ZC@ Z@4C@@rZ@C@Z@C@ ZC@ZC@IZ]C@`ZaC@`qZC@ Z`C@`7Z`WC@@Z C@`ZC@Z@C@ZC@`ԋZC@ZSC@^Z5C@TZC@@hZ HC@{Z}ZC@ |ZC@`|Z`C@}|ZwC@l|Z\C@`_|ZHC@ %|ZC@|ZC@&|Z`C@mZ$C@`]Z`C@`gZ/C@gZ $C@gZ@#C@gZDC@`VgZ 'C@tgZC@~gZ@fC@jgZC@`0gZC@fZhC@fZJC@fZ C@`mfZkC@`FfZ`\C@ fZ C@@eZ@C@eZ@:C@eZ@C@eZ'C@ceZ@C@OeZC@`YeZAC@@OeZ`C@`1eZ[C@dZ@C@@dZC@dZ +C@dZWC@ cZ4C@cZ`C@`bZ$C@[bZ C@faZ>C@5aZ`>C@``Z C@s`Z C@_ZBC@_ZC@`.aZ`uC@ aZ ;C@@aZ`C@bZ`C@cZC@`AcZfC@AcZC@_cZ =C@cZC@cZ C@`cZnC@ cZC@cZ bC@tcZ`5C@@0cZ4C@cZpC@bZ~C@@YbZC@aZC@4aZC@`Z C@`Z`C@`ZVC@_Z VC@/_Z UC@^ZC@`2^Z tC@R]ZC@ ]Z`C@`]Z`WC@6]ZC@@!^Z C@f^ZC@@^ZC@` _ZbC@@Q_ZC@_ZC@`_Z`C@9aZ gC@ aZC@bZ C@^bZ`9C@`bZ C@@rbZ`xC@`YbZC@bZ +C@@cZpC@dZC@IeZ`C@@eZC@`DfZ2C@ QfZ@C@@=eZ@C@zeZ`7C@ dZ,C@eZ$C@ gZIC@hZ C@^kZ`sC@zkZC@`kZ`C@`lZ`zC@@ZlZ`hC@lZC@mZC@}mZ`_C@@mZ C@`mZ@kC@mZC@mZ@C@ mZC@JmZC@mZ`MC@lZ C@mZ ;C@ImZC@pmZC@mZdC@mZ C@mZ`C@mZC@mZC@mZKC@mZC@ZmZC@dmZ&C@mZC@ mZC@mZ@C@mZC@mZ C@umZC@:mZC@@mZC@`lZC@ lZ@EC@ClZ@C@jZ@C@@jZ@oC@PjZ QC@jZ@$C@>iZ@C@hZC@]hZ`C@@hZ`C@`gZ/C@שZ¤C@~Z9C@uZ C@Z C@ZC@ ZC@tZC@`~Z`C@̚ZrC@ߚZ@EC@Z@+C@ޚZC@"ZiC@?Z C@>ZC@RZC@xZC@@ZUC@@xZ޿C@ZZwC@Z C@`Z`C@Z ˽C@`Z ڽC@ZC@ZǾC@Z kC@@Z`,C@UZC@`ZC@ZC@ :ZC@`Z C@@Z6C@~ZC@ZC@#ZԻC@Z`TC@Z ·C@Z` C@ZC@@Z@RC@(Z 4C@YZQC@Z@C@ZǴC@ZKC@ZC@8ZC@Z C@@˞ZC@Z@C@@"Z@ C@"Z@C@Z=C@ZC@qZdzC@qZ@C@Z_C@zZC@6ZC@ZUC@ZdC@`iZC@ZC@@לZC@XZC@ZdC@Z@γC@`TZ C@sZC@טZC@ZC@bZC@ ;Z >C@`'Z@ֲC@DZC@7ZC@`^Z@RC@{ZC@ {ZC@{ZFC@\Z C@@\Z?C@ZC@Z`C@Z9C@ZC@ZC@`ęZթC@@ZOC@aZ C@WZ@ΧC@ZtC@`™Z C@ ߙZ 
ZC@ԙZ`{C@`ޙZ?C@5ZC@@WZC@ ^Z¤C@ Z`ΤC@ZC@`iZ C@`ZC@Z C@ SZ@C@`zZ^C@`ZC@ZC@@Z)C@ZC@Z C@Z@bC@ ZC@`Z#C@ Z@@C@-ZC@ZC@ Z(C@@jZཫC@9ZC@Z@ܫC@ġZ@C@Z`ΫC@Z C@ Z C@`Z`nC@@Z@֭C@ ڡZ@C@bZC@`ZC@_ZsC@_Z༤C@`_Z@QC@_ZC@@_ZzC@`Z`C@V`ZC@ `Z@C@`ZC@`Z C@6aZ/C@aZ ԧC@PbZ`=C@`bZ@ĨC@cZC@dZݪC@ dZ@FC@(eZdC@eZC@LfZ@EC@@fZ C@gZC@SgZsC@gZ)C@gZ C@ hZC@iZIC@hjZ`*C@`rjZVC@ rjZC@jZ6C@ kZڮC@@4kZ`$C@lZC@`lZկC@tmZ 5C@`!nZC@`kZ"C@kZC@lZ C@@lZC@lZ`C@kZ;C@ {jZC@iZҸC@`hZC@fZC@dZ@C@:cZ C@aZ@^C@@|ZC@nZ1C@p{ZC@@ZwC@ZC@۟Z/C@ПZฝC@Z3C@nZC@`Z C@ZÚC@ZǙC@ZC@˟ZC@"ZC@\ZC@eZC@xZkC@٠Z }C@ Z@AC@`tZ@OC@ZC@6ZMC@`ZC@ Z`xC@ GZiC@ ZC@`Z@MC@ Z@C@௣Z3C@`Z`TC@ZC@ZC@Z`ϏC@@uZC@Z-C@`ZC@ZHC@eZ`C@ǧZ{C@ZC@`mZ@C@Z ϐC@ZːC@_ZC@ZC@mZ C@MZC@Z9C@TZC@@|Z/C@ /ZC@ZC@@Z@"C@@үZC@ZZC@+Z)C@@KZəC@@BZ]C@ͯZ|C@Z@C@Z kC@=ZC@ZÜC@ZC@DZǜC@Z &C@ =Z`C@࡫Z@sC@ Z`C@ZiC@3Z@MC@ Z@C@`Z@hC@Z@C@`RZFC@`Z@C@@ZൟC@ Z@C@ZrC@`ZOC@@ZC@Z@C@ZC@(Z{C@ Z@ӥC@Z+C@Z`C@`Z`C@@ZAC@Z@oC@:Z1C@OZ C@ZШC@ PZ`AC@ZC@)ZZ`C@ZuC@-ZC@ˆZ:C@ NZC@@ZC@ZڇC@xZC@wZyC@PZ@C@Z@C@Z`C@ZC@ Z@C@@PZC@ PZC@2Z C@ ZՀC@Z|C@Z@nC@ZǀC@@Z`C@Z+C@aZC@ZC@@eZނC@ ZC@ZC@@ZC@;Z ԀC@@̃Zl}C@`Z@h}C@DZ}C@Z m|C@ńZ{C@ Z{C@MZ"|C@Z |C@?Z|C@VZ|C@Z {C@`Z {C@ Z{C@ZzC@Z {C@DZ{C@இZzC@ ܇ZzC@@Z zC@ZH{C@@9Zw{C@ ZZ}{C@ZY{C@@ˆZ zC@`ZzC@jZyC@@߉ZyC@ RZzC@RZzC@@Z{C@Z|C@BZ`|C@@Z}C@ZIC@ Z`cC@Z@3C@ Z`C@ZوC@ZZ C@QZC@ZnC@ZmC@`ϑZ @mC@ZlC@`ZZkC@ҐZjC@ Z@jC@qZjC@]ZhC@ZphC@Z &hC@@ҒZ@nhC@vZOhC@ÓZgC@ZgC@3Z hC@Z [hC@ZhC@4Z]gC@@ؗZgC@VZ gC@ԘZ@igC@!Z hC@?ZhC@ љZiC@Z`kC@)ZkC@ĚZ@ lC@BZ`blC@ ԛZRlC@`xZ lC@@ٜZ@lC@Z@nlC@ƜZ mC@@#Z@nC@̛Z oC@`ÛZoC@@ZpC@KZqC@Z@pC@ܜZ`pC@ ZpC@ZqC@НZ@qC@'ZGrC@1ZrC@ OZasC@`ZsC@Z]tC@ԟZ uC@mZuC@@2Z `vC@BZvC@ӢZgwC@Z^yC@cZ`yC@8ZyyC@Z iyC@`Z@ zC@Z?{C@ LZ{C@ʨZ@|C@`Z@|C@fZ|C@ Z`6|C@DZ{C@êZB|C@ Z Z}C@@pZ`c~C@ Z C@ਭZ@,C@@uZC@@AZـC@Z #C@ZC@ZԁC@ fZ:C@ZC@Z9C@`Z uC@యZC@@دZ@C@Z C@ ZࢆC@Z6C@@ ZC@gC@ ZgC@`BZ `hC@@ZiC@`ZkC@ Z`kC@ZlC@Z` mC@=Z kmC@Z`mC@Z@mC@,Z lC@ZYlC@ ZlC@Z mC@@YZ mC@ZmC@`Z`2lC@`ZlC@@ZkC@ZwkC@PZLkC@ZjC@Z`pjC@@ZAjC@Z UjC@ZBkC@,ZFkC@`7ZiC@"Z iC@ ZfC@ZfC@ZeC@ZZ [eC@VZGeC@+ZeC@0Z@eC@OZ`aC@,Z^C@GZx]C@ )Z@]C@Z`]C@BZn]C@@mZc]C@Z\C@ 
Zj\C@Z[C@`wZ$[C@`2ZsZC@Z`YC@ZXC@@ZOXC@@ZWC@ 6Z`ZWC@`"Z@WC@`5Z`VC@Z VC@ZUC@Z TC@PZ@SC@ ZRC@QZ `QC@`PZPC@ZOC@ Z UOC@ZNC@`Z@NC@`GZNC@ZNC@Z NC@`7ZJNC@Z9NC@ Z MC@Z@MC@XZMC@&Z OC@Z7PC@Z`qPC@=Z QC@KZ"QC@`ZQC@ ZQC@QZ@(RC@ZpRC@` ZRC@kZ RC@Z lRC@Z`#QC@`Z`&OC@ ZsNC@Z@MC@Z5KC@`Z@,IC@ZGC@Z GC@ Z _GC@ZFC@Z@FC@`:ZdEC@#ZNC@ʤZaC@o Z$aC@Z`aC@ZaC@Z@aC@ |ZwaC@*ZCaC@ĦZ!aC@`$Z`C@Z`C@@ťZM_C@Zs^C@ Z]C@@Z)]C@Z\C@RZZC@ #ZYC@`+ZyYC@ PZ`XC@Z 2XC@ Z`WC@ZvWC@Z`%WC@}Z VC@`LZVC@ZVC@@ ZUC@ʤZTC@@ͤZUTC@5ZSC@Z7SC@ Z RC@@Z RC@@BZ@VSC@[Z@ TC@`ZTC@Z@TC@@ZSC@ZSC@ZZSC@#ZTSC@`ZSC@ZzSC@`ɨZPSC@ Z'SC@ZRC@@%ZRC@9ZqRC@zZRC@ZnRC@Z"RC@ZQC@@ZQC@ZQC@@ΩZQC@ߩZ@ RC@Z RC@)ZQC@EZ`QC@ZPC@ZVPC@1Z`OC@GZNC@ìZNC@Z NC@ Z NC@ԬZFOC@ڬZ@rOC@yZPC@ZhPC@Z`PC@ZPC@@vZZQC@ ZRC@Z SC@Z`SC@ZSC@ ZMTC@`ͬZ TC@@Z@OUC@LZUC@OZUC@@@ZVC@`Z@RVC@ZVC@ ZVC@Z`VC@@ZVC@`Z`VC@ ZWC@@Z$XC@Z}XC@ܬZXC@ ʬZ`mYC@ZZC@SZ ZC@7Z[C@@7Z\C@@RZ\C@SZ}]C@dZ`^C@@ZZn^C@@Z@^C@`Z^C@Z5_C@Z_C@ Z_C@}Z<`C@%Zg`C@ Z =`C@IZ[`C@ Z$aC@$ZHC@@Z`C@ZPC@@Z PC@Z@QC@ Z`RC@Z RC@@Z"QC@ZMC@ZJC@lZJC@kZJC@Z&JC@Z`IC@ LZ`IC@RZHC@@Z`HC@`ZHC@Z.IC@@ZBIC@"Z`IC@@>ZIC@mZjIC@@ZIC@Z JC@ lZ`&JC@@ZJC@@Z eKC@Z`hKC@ZKC@ Z`BLC@;ZLC@@FZgMC@@Z`MC@;Z`MC@AZNC@2Z!OC@ vZOC@@Z`OC@ 8Z PC@JZ@nPC@rZ`hPC@Z@PC@Z cQC@ OZ@QC@ ]Z@RC@`@Z@ASC@bZSC@kZTC@@*Z%UC@@/Z@|UC@Z`VC@ ZVC@@:Z`2WC@ 0Z@WC@ Z WC@ Z?XC@ZXC@ZsYC@ZYC@(ZYC@Z`YC@ZAZC@Z`"[C@ ~Z`[C@@MZ[C@XZ`$\C@`ZS]C@Z@{]C@ gZ]C@mZ]^C@7Z@^C@Z`C@Z``C@@Z`l_C@Z@6_C@Z^C@`PZ@O^C@Z]^C@Z@]C@?Z]C@@tZ?]C@Z T\C@Z \C@"Z`\C@vZ\C@Z\C@Z \C@@Z[C@@!Z[C@Z8[C@Z [C@ZZC@qZ ZC@@Z qZC@LZ`CZC@(Z@YC@ZYC@Z YC@@YZ /YC@ZXC@Z@YC@@ZYC@ZXC@ZXC@ZyXC@7ZtXC@WZ`-XC@VZWC@ Z`UWC@Z@VC@Z@{VC@Z=VC@Z@UC@ Z UC@ Z6UC@[Z>UC@^ZTC@Z TC@`Z>UC@nZUC@@ZHUC@Z`fTC@"Z@-TC@iZQSC@@rZSC@ >ZRTC@ IZTC@kZNTC@ZKTC@ZSC@ZZSC@`Z@ SC@Z@RC@@Z@RC@.ZRC@@EZRC@^Z@xRC@wZ^RC@ Z QRC@ZSRC@ZRC@`ZRC@`ZRC@Z SC@`#ZSC@@HC@Z@HC@Z HC@ZHC@Z@HC@Z HC@@ZIC@Z-IC@`ZXIC@%Z{IC@CZ wIC@jZIC@wZIC@`Z@JC@ZdJC@Z lJC@Z aJC@`Z#JC@`Z JC@@ZJC@1ZIC@@CZIC@aZ@IC@dZ}IC@sZ@kIC@Z(IC@Z`HC@Z HC@Z XHC@`HZ@NHC@bZ HC@ZHC@`ZMHC@ ZTHC@ZHC@ ZJHC@"Z HC@@*ZHC@@'Z GC@zZGC@@tZ@GC@ Z GC@Z 
HC@RZHC@ LZ`IC@Z`IC@Z&JC@kZJC@lZJC@ZJC@ZMC@@Z"QC@Z RC@ Z`RC@Z@QC@@Z PC@ZPC@& Z$C@@tZ Y:C@}Z3C@iZ 5C@@KZo5C@@RZ6C@ZH6C@Z |6C@Z6C@̀Z6C@Z 6C@`rZc6C@ுZ J6C@Z6C@ Z6C@ ؁ZZ7C@Z7C@ kZ 7C@ FZ 8C@&ZN8C@ Z8C@Z8C@fZ8C@MZ@K9C@ Z:C@ Z(:C@`Z Y:C@~ZL:C@~Z`9C@~Z\9C@~ZR9C@~Z8C@O~Z8C@ }Z8C@`}Z@68C@`~Zv7C@G}Z5C@}Z'6C@|Z a6C@|Z 6C@@|Z 7C@|Zc7C@ C|Z@U7C@{Z 7C@{Z 7C@{Z 6C@~{Z5C@zZ5C@zZk5C@zZJ5C@@zZ@5C@zZ4C@zZ@4C@@zZ@4C@ MzZ 4C@ HzZ3C@yZ 3C@yZ 53C@yZ2C@!yZ2C@xZ2C@\xZv2C@ /xZ2C@wZ2C@wZq2C@ wZ#2C@` wZ 2C@@wZ/1C@wZI0C@vZ/C@ vZ/C@wvZT/C@cvZ.C@vZ@.C@tvZ-C@vZ ,C@vZo,C@AvZ`a,C@uZ`&,C@uuZ`a+C@tZh*C@tZ[*C@tZ@A*C@@tZ )C@@1uZ B)C@BuZ(C@ uZY(C@uZ`'C@`uZ &C@uZ@%C@GvZ`W%C@vZ!%C@ vZ$C@xwZ %C@wZ%C@wZB&C@wZ?&C@`xZ@%C@ xZ&C@xZu'C@yZ@9)C@yZ)C@`zZ)C@ zZV*C@{Zf*C@{Z6+C@`{Z+C@R}Z.C@P}Z /C@,}Z`O0C@Q}Z0C@U}Zh1C@g}Z@1C@@~Z@|1C@@~Z`0C@~Z0C@Z1C@`BZN2C@Z2C@Z2C@Z2C@ Z:3C@Z ]3C@{Zd3C@ୀZ2C@Z2C@Z3C@'XvZ C@`lZ(C@HvZ@(%C@uZ%C@uZ^&C@ uZ&C@@FuZ'C@tZ=(C@tZ(C@tZ(C@jtZ'C@%tZ'C@ tZa'C@@sZ@'C@esZ'C@rZ'C@rZ |'C@rZ`I'C@rZ&C@@rZ`&C@5rZK'C@ rZ}'C@qZ'C@qZ`'C@ ]qZ 'C@4qZ'C@`pZ(C@`oZ 'C@oZ+'C@@loZ&C@@oZ`z&C@nZ@\&C@nZ6&C@ nZ`&C@nZ%C@dnZ%C@LnZ&C@"nZ`%C@mZ@I%C@{mZ$C@jmZ$C@\mZ$C@5mZ#C@lZ@#C@lZ T#C@`lZ "C@@-mZ!C@mZY!C@nZ.!C@NnZ`7!C@0oZ@6 C@@UoZC@oZ`kC@oZ`C@UpZ C@ppZC@pZ`BC@@qZ`C@qZ{C@`C@>jZC@ xjZhC@@jZeC@@jZ C@jZ@qC@`jZAC@ jZ@C@ jZC@`jZ.C@5kZ`yC@ kZ@C@`kZ@;C@TlZhC@ slZC@{lZ@C@`lZGC@ lZ`C@mZ`C@ mZC@XnZkC@`oZ C@`#oZ C@nZzC@!nZC@*ZB@ZC@S Z %C@`ZI C@Z C@=Z` C@@BZ C@?ZC@`2ZV C@Z C@Z` C@Z C@`Z C@@2Z C@`ZGC@Z'C@Z@C@Z@BC@ ZC@ZHC@ HZC@`ZC@`ZC@ZC@ZC@ZC@ZC@@ZC@ZC@Z@C@Z@oC@Z@C@@AZ`9C@@Z UC@=Z@ C@`\Z@ C@ Z C@ Z C@-h@+iZB@X^Z C@LcZB@`vcZrB@cZB@cZB@GdZB@ eZ_B@,eZ`B@5eZ@zB@5eZ JB@ZeZB@fZ B@"fZSB@ "fZ#C@4fZ C@ >fZ C@*fZ yC@ fZC@@eZWC@B@ZB@׶Z@B@ZB@`Z B@@Z@B@@ZpB@pZ`B@@ZJB@ZB@ZB@CZ`mB@ZrB@`[@B@ G[@MB@[B@[`B@[ B@'[GB@ [B@`[B@[ B@ [8B@[B@ [B@@;[B@0[`}B@ C[B@@&[`B@[+B@[B@_[B@9[ B@` [@RB@@[ B@y[B@@@[B@`-[_B@J[B@@K[B@`8[B@ [B@ [B@[@bB@[@B@H[B@ [%B@ [ B@ [B@[ BB@[B@$[RB@ I[B@[B@N[@/B@[@B@ [ B@3X@eZB@ _[Z B@hLcZB@ cZB@`bZ B@`bZ@B@ bZ@B@`raZ B@`Z&B@q`ZB@`ZB@_Z 
B@`K_ZB@`^ZB@ q^Z BB@p^Z>B@.^ZB@ _^Z@B@@i^ZfB@V^ZB@]ZB@`]ZIB@ \ZB@7\ZZB@/\ZAB@[Z B@ h[ZB@ _[ZB@[Z@B@`[Z`AB@@[ZB@@[ZB@ [ZfB@[ZB@ [Z`B@[ZB@@\ZB@@i\ZcB@`y\ZNB@ \Z B@\ZB@ \Z`uB@ \ZB@`\ZB@\ZB@`\Z@`B@\ZB@`\ZB@\Z`B@\ZB@@Q]Z8B@]ZB@@\Z:B@\ZjB@\Z}B@A]ZTB@`{]ZB@ (^ZB@ u^ZB@`^ZBB@a_ZB@(`Z@B@W`ZiB@`ZHB@`ZB@aZ4B@ aZ{B@aZ rB@y`Z B@ j`ZB@_Z kB@@_ZB@@_ZPB@_Z@qB@ dZ rB@dZB@dZDB@@eZGB@dZ@B@ ldZ=B@qdZB@dZ2B@cZcB@ cZ`B@cZB@@cZ&B@cZB@QcZB@0cZ'B@@+cZyB@cZB@ bZB@bZB@bZB@`bZ`!B@ bZrB@@bZ`B@bZ PB@bZ B@bZ B@bZ B@ cZB@1cZ'B@CcZB@8cZ`B@LcZB@4@FZ^B@">Z}B@EFZ`B@EZ`B@`GEZGB@@rEZB@6EZB@ DZ@B@@&DZ B@aCZ B@ CZ B@5CZ5B@pBZoB@BZB@@BZB@BZ3B@@C@Z}B@?Z@=B@|?Z@B@`>Z`B@>Z B@ 3>Z|B@O>ZB@ >Z@B@>ZIB@@>ZB@Q>Z`B@@&>ZB@>Z +B@ "?ZB@>Z,B@q>Z@B@">ZB@@)>ZB@`}>ZB@g>Z@QB@`>Z`zB@>Z@B@>ZnB@>ZB@>ZkB@>Z`B@ '>ZB@->ZBB@~>ZDB@`>ZB@>ZB@`?ZB@ ?ZB@ ?ZB@@?ZfB@ ?Z B@`b?ZBB@ g?ZB@@?Z@B@`?ZDB@?ZB@=@Z^B@AZB@ AZ@sB@AZNB@CZDB@BZB@BZB@*CZ?B@ CZ@B@xCZ` B@MDZB@EZB@ EZZB@FZ`B@5`^ZAB@ZZB@/\ZAB@7\ZZB@ \ZB@`]ZIB@]ZB@V^ZB@@i^ZfB@ _^Z@B@.^ZB@p^Z>B@ q^Z BB@`^ZB@^ZB@]Z@B@``]ZB@I]ZB@J]ZB@`d]ZB@\ZB@|[Z0B@ [ZlB@[ZB@ZZ@B@`4[ZB@[ZJB@/\ZAB@6[B@ ZB@3[ 7B@[B@ y[{B@@[nB@`[>B@{[B@@[B@[@FB@[}B@*[@B@"[B@~[@(B@>[OB@Z LB@`YZB@Z8B@ZB@`@Z B@ZB@LZ@jB@6ZB@ZB@`ZB@rZ B@@ZB@ZB@Z`B@ ZB@ZB@(Z ?B@$Z`B@ wZ`B@=Z^B@ZIB@AZB@@?ZB@ZB@[uB@@[B@ [ #B@ X[mB@`{[B@[@@B@[B@N[B@@[B@[?B@O[@B@[B@H[`B@[ 7B@70`t[B@q[@B@[B@`t[rB@a[B@`:[ B@ [B@[>B@[@B@[B@ [B@V[B@?[`B@[rB@~[ B@R~[B@t}[B@@|[6B@|[ B@|[B@6|[B@`|[@rB@`{[nB@m{[`B@` {[@B@@0z[PB@ x[B@$w[B@v[B@ nv[` B@@*t[`B@s[@B@Ds[@B@`r[B@@r[B@r[ dB@ s[B@`s[dB@u[@B@ v[ B@`w[B@`*w[ B@5w[3B@`1v[`JB@`u[`SB@s[~B@(s[>B@ r[lB@r[B@`zs[@^B@`hs[@B@`s[B@Qr[B@r[3B@q[B@q[KB@ms[\B@s[@B@t[@B@Yu[B@`u[B@Ov[B@e~[B@~[ B@~[@{B@[B@ F[ B@[cB@@ [ B@ [$B@v[B@s[B@[B@@[[B@Z[JB@s[eB@ [B@ـ[HB@π[B@[`B@[B@8@iZ`B@_ZB@%dZB@ dZ rB@_Z@qB@@_ZPB@s`ZOB@t`Z B@``Z@B@``ZB@t`Z`B@n`Z2B@`Z`PB@`ZB@`ZB@`ZB@ aZ@UB@aZaB@_aZB@bZB@bZ B@cZ`B@eZB@eZ B@gZB@gZbB@gZ dB@ gZB@~gZ@B@`>iZ@B@=iZB@iZB@`iZ!B@fZ1B@ PfZ @B@@MfZ`B@eZdB@eZB@dZB@9`\Z B@`ZZ)B@\ZB@`\Z@`B@\ZB@`\ZB@ \ZB@ \Z`uB@\ZB@ \Z 
B@`y\ZNB@@i\ZcB@@\ZB@[ZB@ [Z`B@[ZB@ [ZfB@@[ZB@@[ZB@ m[Z&B@[Z)B@ZZ`B@`ZZB@ZZ B@ ZZ6B@ [ZB@h[Z`B@ [Z B@)\ZB@\ZB@:hZ@1B@lZB@  ZB@BZB@Z@B@ZB@6ZB@Z'B@pZ`B@ZB@ ZJB@ZB@@9ZB@ZB@ZB@tZ B@ ZB@ZB@ {ZB@Z'B@KZB@ZPB@Z`B@Z;B@PZMB@Z oB@`Z@B@xZHB@`ZB@Z@eB@~Z B@hZB@WZB@@ZB@?ZB@ZYB@ QZB@@Z 6B@Z@gB@}ZB@`lZB@Z`B@Z@B@@Z uB@ZB@Z@B@ZB@0Z`6B@Z -B@!ZoB@ EZB@@Z?B@Z B@`ZB@ZB@Z B@ zZ B@ZB@ fZ@B@Z@B@Z@ B@ZB@vZ B@Z B@Z`=B@NZ`B@BZB@ ZJB@@^Z` B@ZB@`ZzB@ZB@Z5B@Z7B@mZeB@ZB@PZB@vZTB@ZB@ ZtB@Z@*B@ZB@LZ@kB@ Z@B@Z`B@Z B@`ZB@@ZB@ ?ZB@GZ7B@ZdB@Z`B@ZB@]ZB@ ZB@/Z gB@ZB@Z`tB@JZ`;B@ ZiB@ϾZ@B@:Z pB@Z B@ ,Z tB@οZB@`^ZB@Z@hB@ ZB@ ZB@ZGB@iZB@ZB@Z PB@6ZB@ֻZ@ B@Z`B@`]Z*B@ĸZB@Z]B@`YZ`aB@Z B@Z`B@_ZB@ZvB@PZB@`Z%B@=Z&B@_Z`B@@Z B@lZ` B@ଲZpB@@Z B@]ZB@ Z`B@=ZB@ Z B@7Z&B@bZB@`DZdB@`KZB@ZB@`ZſB@`2Z[B@@YZ`B@lZ@EB@`Z۾B@FZB@Z@B@ ZCB@ ~Z@B@ݺZļB@ZYB@CZKB@8ZB@ZZRB@ZZsB@ ZZB@ZZB@`ZZ`B@`ZZ`B@YZB@XZB@XZB@@XZjB@XZEB@ XZ 7B@@XZ B@XZ B@XZ]B@ XZB@XZB@XZ`B@XZ B@@=XZB@` XZB@XZB@WZ B@ WZB@WZ nB@`GXZB@XZB@XZB@`YZB@ AYZB@@zYZ@B@@ZZB@`qZZB@9[ZB@\Z B@;\Z`B@u\Z`B@\Z@B@5]Z` B@]Z@B@ ^ZB@`^ZB@S^ZB@&^Z mB@ ;^Z@ B@(^Z`B@^ZB@ ^ZB@]ZB@]ZB@ ]Z@B@+\ZB@@\Z@B@>@ZB@Z@B@vZ B@`5ZB@\Z@B@ Z`CB@Z B@ Z "B@Z@B@ZB@`QZB@ZB@Z SB@@Z`B@ZB@@EZdB@ZB@ bZ0B@@Z[B@vZ B@?d\ bB@T\6B@2U\ ֲB@yV\ B@`W\XB@3W\iB@`V\཯B@V\B@W\B@X\@ӪB@?Y\`XB@`Y\B@ .[\`B@)]\B@w^\ B@@Y_\ bB@_\}B@`\஦B@ a\B@a\B@ b\%B@ @b\`MB@b\`FB@c\@߬B@d\@B@c\B@b\̯B@`\B@_\@B@|]\ ¯B@`b]\B@]\)B@r]\B@ \\ ùB@[\`B@rZ\oB@Y\bB@`W\B@NV\6B@ U\B@U\B@@U\`AB@`T\`B@ 0T\@B@T\fB@?T\`¶B@T\ B@+U\ dB@lU\@B@MU\B@U\B@U\ ֲB@@/Z`ԈB@ZB@`ZB@Z@B@`ZB@5ZնB@Z_B@ZdB@EZ@hB@@kZ`,B@ZळB@ZB@ CZ@ƱB@hZ @B@ ZB@ ثZcB@`@Z tB@ ZuB@`yZࢯB@Z@B@pZ B@IZ`B@eZ@B@HZB@ Z`B@ QZ@ܭB@ZtB@@vZ ;B@*ZŬB@`)Z`?B@WZ QB@@DZ༪B@ Z B@Z>B@ZB@`Z@CB@aZ NB@@-Z`ܦB@ZB@eZĥB@`ZB@@-ZæB@ZҦB@nZB@ZລB@ZB@`סZB@`6Z5B@ৠZB@@,ZYB@֟ZZ@hB@ZB@Z@B@;Z@B@ tZ ˚B@@'ZB@@Z zB@@ZB@ƮZ`zB@ Z֘B@SZ@B@Z`B@ZPB@@ Z+B@:Z`iB@ :Z@B@pZB@@JZYB@(ZIB@ ;Z B@ྯZwB@ZhB@@ȰZ B@`Z@`B@ZࡎB@;ZaB@@Z@ԐB@mZKB@ ZB@вZ`̓B@ ȲZB@`ZYB@>Z@B@QZB@?Z]B@ZԙB@ϲZ B@AZB@ аZ 5B@ZB@`Z@B@Z NB@ ZNB@Z`B@ NZB@ZB@`lZB@`cZB@`ZB@`̮Z-B@QZ:B@ ZuB@Z`B@eZcB@ JZBB@ 
_ZӡB@ZB@ٯZ yB@|ZdB@ZB@Z@B@ͱZğB@@ZB@ ZB@Z B@Z@}B@kZB@kZ`fB@ZܥB@`[Z TB@ ZB@ZԧB@]Z ,B@ƲZB@ Z jB@|Z wB@`ڵZ+B@Z B@ZשB@ZMB@`ZӪB@Z@B@`Z B@öZB@ֶZ@B@@/Z,B@/Z B@ZϯB@Z B@ZVB@୵ZeB@ݳZ`lB@`B@ _#[4B@ [>B@ [>B@ [`B@ [ಢB@`![`B@_#[B@ _#[*B@_#[4B@Fiona-1.7.10/tests/data/coutwildrnp.shx000066400000000000000000000011741317446052300200150ustar00rootroot00000000000000' >d\`ԈB@">ZD@2 `X^h !#*p$0%0)*,3j:XARC@EZH NPUJ\^bejJlmqxx|x8VZXbPj@h@: ^ X&@jV 0>@~hǶȺ(˒>B>PՒ`Fiona-1.7.10/tests/data/grenada.geojson000066400000000000000000000311721317446052300177070ustar00rootroot00000000000000{"features":[{"geometry":{"coordinates":[[[[-61.173214300000005,12.516654800000001],[-61.3827217,12.5301363],[-61.665747100000004,12.5966532],[-61.6661847,12.596],[-61.66814250000001,12.593],[-61.6700247,12.59],[-61.6718337,12.587],[-61.673571700000004,12.584],[-61.6752407,12.581],[-61.6768427,12.578],[-61.678379400000004,12.575000000000001],[-61.6803295,12.571],[-61.6830501,12.565000000000001],[-61.68553430000001,12.559000000000001],[-61.687063699999996,12.555000000000001],[-61.6884946,12.551],[-61.6898391,12.546999999999999],[-61.69209600000001,12.540999999999999],[-61.69413360000001,12.535],[-61.69595870000001,12.529],[-61.697577200000005,12.523],[-61.69899410000001,12.517],[-61.700213700000006,12.511],[-61.7012395,12.505],[-61.7020744,12.499],[-61.702626200000005,12.494],[-61.7033841,12.493],[-61.706211800000005,12.491],[-61.7089415,12.489],[-61.7141311,12.485000000000001],[-61.718995500000005,12.481],[-61.72356890000001,12.477],[-61.727879200000004,12.473],[-61.7319495,12.469000000000001],[-61.73579920000001,12.465000000000002],[-61.74032590000001,12.46],[-61.74373590000001,12.456000000000001],[-61.746971,12.452000000000002],[-61.7500412,12.447999999999999],[-61.75295580000001,12.443999999999999],[-61.753784499999995,12.443],[-61.756858300000005,12.44],[-61.7598054,12.437],[-61.762633400000006,12.434],[-61.76534870000001,12.431],[-61.767957200000005,12.427999999999999],[-61.7704641,12.425],[-61.7728741,12.422]
,[-61.775191500000005,12.419],[-61.7774201,12.416],[-61.7802595,12.412],[-61.782954800000006,12.408],[-61.78551270000001,12.404],[-61.7873446,12.401],[-61.789675900000006,12.397],[-61.7918847,12.393],[-61.79397550000001,12.389000000000001],[-61.794998400000004,12.388],[-61.79830060000001,12.386000000000001],[-61.8030062,12.383000000000001],[-61.8059936,12.381],[-61.810272399999995,12.378],[-61.8130009,12.376000000000001],[-61.815637599999995,12.374],[-61.8181882,12.372000000000002],[-61.82186339999999,12.369000000000002],[-61.8265048,12.365000000000002],[-61.830876599999996,12.361],[-61.8329692,12.359000000000002],[-61.835999,12.356000000000002],[-61.8413082,12.351],[-61.845319800000006,12.347],[-61.8464439,12.346],[-61.8501187,12.343],[-61.853625699999995,12.34],[-61.85697739999999,12.337],[-61.86122339999999,12.333],[-61.864252900000004,12.33],[-61.8671584,12.327],[-61.8699469,12.324],[-61.872645999999996,12.321],[-61.8754727,12.318],[-61.87906749999999,12.314],[-61.8833,12.309000000000001],[-61.88726319999999,12.304],[-61.88952,12.301],[-61.891690399999995,12.297999999999998],[-61.8937778,12.295],[-61.895785200000006,12.292],[-61.89771530000001,12.289],[-61.899570800000006,12.286],[-61.90251490000001,12.280999999999999],[-61.904753,12.277],[-61.9068719,12.273],[-61.908875900000005,12.269],[-61.911674299999994,12.263],[-61.9134062,12.259],[-61.9150578,12.255],[-61.9179797,12.248999999999999],[-61.920656900000004,12.242999999999999],[-61.92290190000001,12.238999999999999],[-61.925082,12.235],[-61.92666,12.232],[-61.9286637,12.227999999999998],[-61.930556100000004,12.223999999999998],[-61.9332651,12.217999999999998],[-61.936145100000005,12.212],[-61.938782200000006,12.206],[-61.943587599999994,12.193999999999999],[-61.94511500000001,12.19],[-61.9465439,12.186],[-61.9485074,12.18],[-61.95028749999999,12.174],[-61.95186999999999,12.168],[-61.9532519,12.162],[-61.95443739999999,12.156],[-61.954975999999995,12.154],[-61.9570107,12.147999999999998],[-61.9594482,12.139999
999999999],[-61.961132600000006,12.133999999999999],[-61.962614,12.127999999999998],[-61.96295200000001,12.126999999999999],[-61.9668105,12.122],[-61.9704259,12.116999999999999],[-61.9738135,12.112],[-61.9769866,12.107],[-61.9799566,12.102],[-61.9827336,12.097],[-61.9853262,12.092],[-61.9882048,12.086],[-61.990875800000005,12.08],[-61.99252880000001,12.076],[-61.994819,12.07],[-61.996888999999996,12.064],[-61.99874590000001,12.058],[-62.000395600000004,12.052000000000001],[-62.0018433,12.046],[-62.0030933,12.04],[-62.003818700000004,12.036],[-62.0047472,12.03],[-62.0052609,12.026],[-62.005875200000006,12.02],[-62.0061812,12.016],[-62.0064861,12.01],[-62.0065868,12.006],[-62.006584499999995,12],[-62.006398100000006,11.994],[-62.0061714,11.99],[-62.0056768,11.984],[-62.0052436,11.98],[-62.004436999999996,11.974],[-62.003794,11.97],[-62.0026693,11.964],[-62.001811399999994,11.96],[-62.0003595,11.954],[-61.999279800000004,11.950000000000001],[-61.9974886,11.943999999999999],[-61.9961776,11.94],[-61.9940313,11.934],[-61.9924772,11.93],[-61.9908218,11.926],[-61.989062399999995,11.922],[-61.9871961,11.918],[-61.984707699999994,11.913],[-61.9825882,11.909],[-61.9803498,11.905000000000001],[-61.9773776,11.9],[-61.9748543,11.896],[-61.972195400000004,11.892000000000001],[-61.9693945,11.888],[-61.9664442,11.884],[-61.9641286,11.881],[-61.9617206,11.878],[-61.959215900000004,11.875000000000002],[-61.9557177,11.871],[-61.9520267,11.867],[-61.9496952,11.864],[-61.94728729999999,11.861],[-61.9430571,11.856000000000002],[-61.93853550000001,11.851],[-61.934690599999996,11.847],[-61.9306255,11.843],[-61.9274208,11.84],[-61.922921800000005,11.836],[-61.9193636,11.833],[-61.9156332,11.83],[-61.911715,11.827],[-61.9075906,11.824],[-61.903238,11.821],[-61.89863020000001,11.818],[-61.8937341,11.815000000000001],[-61.888507499999996,11.812000000000001],[-61.88481339999999,11.81],[-61.8789067,11.807],[-61.87468659999999,11.805000000000001],[-61.870200499999996,11.803],[-61.86540230000001,11
.801],[-61.8602301,11.799],[-61.854597299999995,11.796999999999999],[-61.848375600000004,11.795],[-61.84498479999999,11.793999999999999],[-61.8413608,11.793],[-61.8374527,11.792],[-61.8331873,11.790999999999999],[-61.828452500000004,11.79],[-61.8230605,11.789],[-61.81664609999999,11.787999999999998],[-61.808274399999995,11.786999999999999],[-61.790283900000006,11.786],[-61.7840631,11.786],[-61.76607270000001,11.786999999999999],[-61.7573236,11.787999999999998],[-61.73933300000001,11.789],[-61.730961300000004,11.79],[-61.72079310000001,11.790999999999999],[-61.70280230000001,11.792],[-61.6944305,11.793],[-61.688016000000005,11.793999999999999],[-61.6826238,11.795],[-61.6732043,11.796999999999999],[-61.667812100000006,11.797999999999998],[-61.663077200000004,11.799],[-61.6588117,11.8],[-61.654903499999996,11.801],[-61.6512793,11.802000000000001],[-61.64788839999999,11.803],[-61.644693499999995,11.804],[-61.63878580000001,11.806000000000001],[-61.636033600000005,11.807],[-61.6308613,11.809000000000001],[-61.62841970000001,11.81],[-61.623784,11.812000000000001],[-61.621576700000006,11.813],[-61.6173564,11.815000000000001],[-61.6133668,11.817],[-61.60957990000001,11.819],[-61.6042318,11.822000000000001],[-61.6008621,11.824],[-61.5976324,11.826],[-61.5930244,11.829],[-61.590096,11.831],[-61.5872727,11.833],[-61.585739600000004,11.834],[-61.5816382,11.836],[-61.5758831,11.839],[-61.5705345,11.842],[-61.565532900000015,11.845],[-61.56375870000001,11.846],[-61.55785109999999,11.849],[-61.552374300000004,11.852000000000002],[-61.5472238,11.855000000000002],[-61.543925300000005,11.857000000000001],[-61.5407605,11.859000000000002],[-61.5362404,11.862000000000002],[-61.533365200000006,11.864],[-61.530591300000005,11.866000000000001],[-61.52791200000001,11.868],[-61.5240577,11.871],[-61.5215904,11.873000000000001],[-61.5180317,11.876000000000001],[-61.515748300000006,11.878],[-61.5124482,11.881],[-61.5103269,11.883000000000001],[-61.5072563,11.886000000000001],[-61.49835930000001
,11.895000000000001],[-61.494617399999996,11.899000000000001],[-61.4902146,11.904],[-61.48533390000001,11.909],[-61.4816423,11.913],[-61.47729749999999,11.918],[-61.4732301,11.923],[-61.4694197,11.927999999999999],[-61.464353900000006,11.935],[-61.4615887,11.939],[-61.458328800000004,11.943999999999999],[-61.4552762,11.949],[-61.452420399999994,11.954],[-61.450271099999995,11.958],[-61.4482377,11.962000000000002],[-61.4454269,11.967],[-61.4438039,11.97],[-61.4412339,11.975000000000001],[-61.4388714,11.979000000000001],[-61.436091399999995,11.984],[-61.43451230000001,11.987],[-61.4329978,11.99],[-61.4310762,11.994],[-61.428396400000004,12],[-61.42595080000001,12.006],[-61.423730500000005,12.012],[-61.42313910000001,12.013],[-61.4211047,12.016],[-61.41851320000001,12.02],[-61.4166569,12.023],[-61.41487299999999,12.026],[-61.4131591,12.029],[-61.4109797,12.033],[-61.409422,12.036],[-61.40744449999999,12.04],[-61.405577300000004,12.043999999999999],[-61.4038171,12.047999999999998],[-61.402161,12.052000000000001],[-61.399865999999996,12.058],[-61.39845880000001,12.062000000000001],[-61.3971473,12.066],[-61.3959295,12.07],[-61.394275,12.076],[-61.393596300000006,12.078],[-61.3910564,12.081],[-61.38782199999999,12.085],[-61.3855047,12.088],[-61.382552100000005,12.092],[-61.3804362,12.095],[-61.37774039999999,12.099],[-61.3758089,12.102],[-61.3733493,12.106],[-61.37158839999999,12.109],[-61.369348200000005,12.113],[-61.367746499999996,12.116],[-61.36571180000001,12.12],[-61.3637893,12.123999999999999],[-61.3619754,12.127999999999998],[-61.360267099999994,12.132],[-61.35866139999999,12.136],[-61.35643999999999,12.142],[-61.35508039999999,12.145999999999999],[-61.3538123,12.15],[-61.3520543,12.156],[-61.3509963,12.16],[-61.35002769999999,12.164],[-61.3487397,12.17],[-61.34798939999999,12.174],[-61.34732449999999,12.177999999999999],[-61.3464859,12.184],[-61.346031399999994,12.187999999999999],[-61.345660099999996,12.192],[-61.3452579,12.197999999999999],[-61.3450925,12.202],[
-61.3450091,12.206],[-61.345007599999995,12.209999999999999],[-61.345087899999996,12.213999999999999],[-61.3452502,12.217999999999998],[-61.345494599999995,12.222],[-61.34582149999999,12.225999999999999],[-61.3462313,12.229999999999999],[-61.347002499999995,12.235999999999999],[-61.34762189999999,12.239999999999998],[-61.3483264,12.243999999999998],[-61.3495448,12.25],[-61.345779300000004,12.253],[-61.3421596,12.256],[-61.339838,12.258],[-61.3364839,12.261],[-61.333273999999996,12.264],[-61.330199,12.267],[-61.3272505,12.27],[-61.324421099999995,12.273],[-61.3217043,12.276],[-61.31824699999999,12.28],[-61.3157713,12.283],[-61.3126179,12.286999999999999],[-61.30962449999999,12.290999999999999],[-61.3067826,12.295],[-61.30408469999999,12.299],[-61.301524099999995,12.303],[-61.300435,12.304],[-61.297963700000004,12.306000000000001],[-61.29439910000001,12.309000000000001],[-61.29211200000002,12.311],[-61.2888064,12.314],[-61.286681699999995,12.316],[-61.283606,12.319],[-61.280656900000004,12.322000000000001],[-61.277826999999995,12.325000000000001],[-61.27510960000001,12.328],[-61.272499,12.331],[-61.26999000000001,12.334],[-61.267577800000005,12.337],[-61.26525820000001,12.34],[-61.262302800000015,12.344],[-61.260184900000006,12.347],[-61.25748639999999,12.351],[-61.256502700000006,12.352000000000002],[-61.25251010000001,12.355000000000002],[-61.248711400000005,12.358],[-61.245090100000006,12.361],[-61.2416324,12.364],[-61.23832620000001,12.367],[-61.235161000000005,12.370000000000001],[-61.23212780000001,12.373000000000001],[-61.22827520000001,12.377],[-61.22552040000001,12.38],[-61.22287430000001,12.383000000000001],[-61.220331400000006,12.386000000000001],[-61.2170932,12.39],[-61.2147732,12.393],[-61.2118172,12.397],[-61.209698800000005,12.4],[-61.206999800000006,12.404],[-61.205066,12.407],[-61.20258810000001,12.411],[-61.2008015,12.414],[-61.199085100000005,12.417],[-61.19743690000001,12.42],[-61.19585520000001,12.423],[-61.19433810000001,12.426],[-61.192413300000
01,12.43],[-61.1897291,12.436],[-61.1872793,12.442],[-61.18505530000001,12.447999999999999],[-61.1836941,12.452000000000002],[-61.1824277,12.456000000000001],[-61.18125439999999,12.46],[-61.180172500000005,12.464],[-61.1791805,12.468],[-61.178277,12.472000000000001],[-61.1770853,12.478],[-61.17603230000001,12.484],[-61.175387900000004,12.488],[-61.1745797,12.494],[-61.1741456,12.498],[-61.1737945,12.502],[-61.173526,12.506],[-61.17333980000001,12.51],[-61.17323580000001,12.514],[-61.173214300000005,12.516654800000001]]]],"type":"MultiPolygon"},"id":550727,"osm_type":"relation","type":"Feature","name":"Grenada","properties":{"flag":"http://upload.wikimedia.org/wikipedia/commons/b/bc/Flag_of_Grenada.svg","name":"Grenada","name:cs":"Grenada","name:de":"Grenada","name:en":"Grenada","name:eo":"Grenado","name:fr":"Grenade","name:fy":"Grenada","name:hr":"Grenada","name:nl":"Grenada","name:ru":"Гренада","name:sl":"Grenada","name:ta":"கிரெனடா","name:uk":"Гренада","boundary":"administrative","name:tzl":"Grenada","timezone":"America/Grenada","wikidata":"Q769","ISO3166-1":"GD","wikipedia":"en:Grenada","admin_level":"2","is_in:continent":"North America","ISO3166-1:alpha2":"GD","ISO3166-1:alpha3":"GRD","ISO3166-1:numeric":"308"}}],"type":"FeatureCollection","geocoding":{"creation_date":"2016-10-12","generator":{"author":{"name":"Mapzen"},"package":"fences-builder","version":"0.1.2"},"license":"ODbL (see http://www.openstreetmap.org/copyright)"}} Fiona-1.7.10/tests/data/sequence-pp.txt000066400000000000000000000311561317446052300177100ustar00rootroot00000000000000{ "geometry": { "type": "Polygon", "coordinates": [ [ [ -111.73527526855469, 41.995094299316406 ], [ -111.65931701660156, 41.99627685546875 ], [ -111.6587142944336, 41.9921875 ], [ -111.65888977050781, 41.95676803588867 ], [ -111.67082977294922, 41.91230010986328 ], [ -111.67332458496094, 41.905494689941406 ], [ -111.67088317871094, 41.90049362182617 ], [ -111.66474914550781, 41.893211364746094 ], [ -111.6506576538086, 
41.875465393066406 ], [ -111.64759826660156, 41.87091827392578 ], [ -111.64640808105469, 41.86273956298828 ], [ -111.64334869384766, 41.858192443847656 ], [ -111.63720703125, 41.85499572753906 ], [ -111.633544921875, 41.847267150878906 ], [ -111.63053894042969, 41.83409118652344 ], [ -111.6330337524414, 41.82728576660156 ], [ -111.63983154296875, 41.8227653503418 ], [ -111.6484603881836, 41.82188034057617 ], [ -111.66077423095703, 41.82327651977539 ], [ -111.6712417602539, 41.82330322265625 ], [ -111.67618560791016, 41.82013702392578 ], [ -111.68803405761719, 41.78792953491211 ], [ -111.69361114501953, 41.77931594848633 ], [ -111.70162200927734, 41.77797317504883 ], [ -111.70901489257812, 41.77663040161133 ], [ -111.71395111083984, 41.772098541259766 ], [ -111.71891784667969, 41.763031005859375 ], [ -111.72816467285156, 41.75851058959961 ], [ -111.74726104736328, 41.75537109375 ], [ -111.75650024414062, 41.752662658691406 ], [ -111.77067565917969, 41.7445182800293 ], [ -111.77064514160156, 41.75495910644531 ], [ -111.75585174560547, 41.76219940185547 ], [ -111.7330551147461, 41.766693115234375 ], [ -111.72749328613281, 41.77212905883789 ], [ -111.71883392333984, 41.7834587097168 ], [ -111.71080780029297, 41.78889083862305 ], [ -111.70340728759766, 41.79250717163086 ], [ -111.70030212402344, 41.798404693603516 ], [ -111.70210266113281, 41.8088493347168 ], [ -111.70760345458984, 41.819759368896484 ], [ -111.71312713623047, 41.82340621948242 ], [ -111.71929168701172, 41.82341766357422 ], [ -111.72545623779297, 41.8225212097168 ], [ -111.7341537475586, 41.803016662597656 ], [ -111.740966796875, 41.79213333129883 ], [ -111.74531555175781, 41.78215408325195 ], [ -111.77122497558594, 41.7658576965332 ], [ -111.77056884765625, 41.77811813354492 ], [ -111.7662582397461, 41.778106689453125 ], [ -111.76746368408203, 41.78628158569336 ], [ -111.76253509521484, 41.78627395629883 ], [ -111.76241302490234, 41.82259750366211 ], [ -111.77104187011719, 41.8221549987793 ], [ 
-111.77161407470703, 41.83351135253906 ], [ -111.7333755493164, 41.84524154663086 ], [ -111.73274993896484, 41.847511291503906 ], [ -111.7376708984375, 41.84979248046875 ], [ -111.77157592773438, 41.845767974853516 ], [ -111.77215576171875, 41.85802459716797 ], [ -111.75243377685547, 41.85844802856445 ], [ -111.72467803955078, 41.86384201049805 ], [ -111.71109771728516, 41.868804931640625 ], [ -111.70182037353516, 41.87604904174805 ], [ -111.69624328613281, 41.88193893432617 ], [ -111.69497680664062, 41.88874816894531 ], [ -111.70053100585938, 41.89057540893555 ], [ -111.70793151855469, 41.88923263549805 ], [ -111.72091674804688, 41.87972640991211 ], [ -111.73388671875, 41.87384796142578 ], [ -111.75301361083984, 41.86888885498047 ], [ -111.75350952148438, 41.90249252319336 ], [ -111.74364471435547, 41.90247344970703 ], [ -111.74463653564453, 41.967864990234375 ], [ -111.7119369506836, 41.96416473388672 ], [ -111.69283294677734, 41.95912551879883 ], [ -111.68911743164062, 41.96047592163086 ], [ -111.6891098022461, 41.96320343017578 ], [ -111.69341278076172, 41.96684646606445 ], [ -111.70449829101562, 41.972320556640625 ], [ -111.7341079711914, 41.97828674316406 ], [ -111.73527526855469, 41.995094299316406 ] ] ] }, "type": "Feature", "id": "0", "properties": { "PERIMETER": 1.22107, "FEATURE2": null, "NAME": "Mount Naomi Wilderness", "FEATURE1": "Wilderness", "URL": "http://www.wilderness.net/index.cfm?fuse=NWPS&sec=wildView&wname=Mount%20Naomi", "AGBUR": "FS", "AREA": 0.0179264, "STATE_FIPS": "49", "WILDRNP020": 332, "STATE": "UT" } } { "geometry": { "type": "Polygon", "coordinates": [ [ [ -112.00384521484375, 41.552703857421875 ], [ -112.00446319580078, 41.56586456298828 ], [ -112.0112075805664, 41.56586456298828 ], [ -112.01121520996094, 41.57902526855469 ], [ -112.01734924316406, 41.57902526855469 ], [ -112.0173568725586, 41.594459533691406 ], [ -112.02779388427734, 41.5940055847168 ], [ -112.02779388427734, 41.60171890258789 ], [ -112.03945922851562, 
41.60126495361328 ], [ -112.04007720947266, 41.608524322509766 ], [ -112.04744720458984, 41.608524322509766 ], [ -112.0474624633789, 41.62804412841797 ], [ -112.05974578857422, 41.62758255004883 ], [ -112.05975341796875, 41.640296936035156 ], [ -112.050537109375, 41.64030075073242 ], [ -112.05054473876953, 41.64983367919922 ], [ -112.04132843017578, 41.64983367919922 ], [ -112.04195404052734, 41.66299819946289 ], [ -112.05793762207031, 41.662540435791016 ], [ -112.0579605102539, 41.692047119140625 ], [ -112.07394409179688, 41.692039489746094 ], [ -112.07459259033203, 41.72381591796875 ], [ -112.06167602539062, 41.72382354736328 ], [ -112.0616683959961, 41.71383285522461 ], [ -112.05490112304688, 41.713836669921875 ], [ -112.04137420654297, 41.71384048461914 ], [ -112.04138946533203, 41.7379035949707 ], [ -112.0376968383789, 41.74108123779297 ], [ -112.03339385986328, 41.741085052490234 ], [ -112.02908325195312, 41.729736328125 ], [ -112.02599334716797, 41.71657180786133 ], [ -112.0241470336914, 41.71157455444336 ], [ -112.0272216796875, 41.704769134521484 ], [ -112.02413940429688, 41.70068359375 ], [ -112.01676177978516, 41.69977951049805 ], [ -112.01615142822266, 41.7070426940918 ], [ -112.00508117675781, 41.707496643066406 ], [ -112.00508117675781, 41.66618347167969 ], [ -111.9792709350586, 41.6666374206543 ], [ -111.9786605834961, 41.653926849365234 ], [ -111.96821594238281, 41.65346908569336 ], [ -111.96760559082031, 41.6407585144043 ], [ -111.96146392822266, 41.6407585144043 ], [ -111.96025085449219, 41.61125183105469 ], [ -111.95042419433594, 41.61124801635742 ], [ -111.94796752929688, 41.60988235473633 ], [ -111.94735717773438, 41.60761260986328 ], [ -111.9522705078125, 41.60443878173828 ], [ -111.96455383300781, 41.60262680053711 ], [ -111.9682388305664, 41.60398864746094 ], [ -111.9725341796875, 41.60807418823242 ], [ -111.97560119628906, 41.60943603515625 ], [ -111.97928619384766, 41.61034393310547 ], [ -111.98542785644531, 41.609439849853516 ], [ 
-111.98481750488281, 41.58356475830078 ], [ -111.97868347167969, 41.58356857299805 ], [ -111.97745513916016, 41.570404052734375 ], [ -111.97132110595703, 41.57085418701172 ], [ -111.97132110595703, 41.56450271606445 ], [ -111.98297882080078, 41.564048767089844 ], [ -111.98175811767578, 41.54090118408203 ], [ -111.98176574707031, 41.53545379638672 ], [ -112.00323486328125, 41.53545379638672 ], [ -112.00384521484375, 41.552703857421875 ] ] ] }, "type": "Feature", "id": "1", "properties": { "PERIMETER": 0.755827, "FEATURE2": null, "NAME": "Wellsville Mountain Wilderness", "FEATURE1": "Wilderness", "URL": "http://www.wilderness.net/index.cfm?fuse=NWPS&sec=wildView&wname=Wellsville%20Mountain", "AGBUR": "FS", "AREA": 0.0104441, "STATE_FIPS": "49", "WILDRNP020": 336, "STATE": "UT" } } Fiona-1.7.10/tests/data/sequence.txt000066400000000000000000000153301317446052300172670ustar00rootroot00000000000000{"geometry": {"type": "Polygon", "coordinates": [[[-111.73527526855469, 41.995094299316406], [-111.65931701660156, 41.99627685546875], [-111.6587142944336, 41.9921875], [-111.65888977050781, 41.95676803588867], [-111.67082977294922, 41.91230010986328], [-111.67332458496094, 41.905494689941406], [-111.67088317871094, 41.90049362182617], [-111.66474914550781, 41.893211364746094], [-111.6506576538086, 41.875465393066406], [-111.64759826660156, 41.87091827392578], [-111.64640808105469, 41.86273956298828], [-111.64334869384766, 41.858192443847656], [-111.63720703125, 41.85499572753906], [-111.633544921875, 41.847267150878906], [-111.63053894042969, 41.83409118652344], [-111.6330337524414, 41.82728576660156], [-111.63983154296875, 41.8227653503418], [-111.6484603881836, 41.82188034057617], [-111.66077423095703, 41.82327651977539], [-111.6712417602539, 41.82330322265625], [-111.67618560791016, 41.82013702392578], [-111.68803405761719, 41.78792953491211], [-111.69361114501953, 41.77931594848633], [-111.70162200927734, 41.77797317504883], [-111.70901489257812, 41.77663040161133], 
[-111.71395111083984, 41.772098541259766], [-111.71891784667969, 41.763031005859375], [-111.72816467285156, 41.75851058959961], [-111.74726104736328, 41.75537109375], [-111.75650024414062, 41.752662658691406], [-111.77067565917969, 41.7445182800293], [-111.77064514160156, 41.75495910644531], [-111.75585174560547, 41.76219940185547], [-111.7330551147461, 41.766693115234375], [-111.72749328613281, 41.77212905883789], [-111.71883392333984, 41.7834587097168], [-111.71080780029297, 41.78889083862305], [-111.70340728759766, 41.79250717163086], [-111.70030212402344, 41.798404693603516], [-111.70210266113281, 41.8088493347168], [-111.70760345458984, 41.819759368896484], [-111.71312713623047, 41.82340621948242], [-111.71929168701172, 41.82341766357422], [-111.72545623779297, 41.8225212097168], [-111.7341537475586, 41.803016662597656], [-111.740966796875, 41.79213333129883], [-111.74531555175781, 41.78215408325195], [-111.77122497558594, 41.7658576965332], [-111.77056884765625, 41.77811813354492], [-111.7662582397461, 41.778106689453125], [-111.76746368408203, 41.78628158569336], [-111.76253509521484, 41.78627395629883], [-111.76241302490234, 41.82259750366211], [-111.77104187011719, 41.8221549987793], [-111.77161407470703, 41.83351135253906], [-111.7333755493164, 41.84524154663086], [-111.73274993896484, 41.847511291503906], [-111.7376708984375, 41.84979248046875], [-111.77157592773438, 41.845767974853516], [-111.77215576171875, 41.85802459716797], [-111.75243377685547, 41.85844802856445], [-111.72467803955078, 41.86384201049805], [-111.71109771728516, 41.868804931640625], [-111.70182037353516, 41.87604904174805], [-111.69624328613281, 41.88193893432617], [-111.69497680664062, 41.88874816894531], [-111.70053100585938, 41.89057540893555], [-111.70793151855469, 41.88923263549805], [-111.72091674804688, 41.87972640991211], [-111.73388671875, 41.87384796142578], [-111.75301361083984, 41.86888885498047], [-111.75350952148438, 41.90249252319336], [-111.74364471435547, 
41.90247344970703], [-111.74463653564453, 41.967864990234375], [-111.7119369506836, 41.96416473388672], [-111.69283294677734, 41.95912551879883], [-111.68911743164062, 41.96047592163086], [-111.6891098022461, 41.96320343017578], [-111.69341278076172, 41.96684646606445], [-111.70449829101562, 41.972320556640625], [-111.7341079711914, 41.97828674316406], [-111.73527526855469, 41.995094299316406]]]}, "type": "Feature", "id": "0", "properties": {"PERIMETER": 1.22107, "FEATURE2": null, "NAME": "Mount Naomi Wilderness", "FEATURE1": "Wilderness", "URL": "http://www.wilderness.net/index.cfm?fuse=NWPS&sec=wildView&wname=Mount%20Naomi", "AGBUR": "FS", "AREA": 0.0179264, "STATE_FIPS": "49", "WILDRNP020": 332, "STATE": "UT"}} {"geometry": {"type": "Polygon", "coordinates": [[[-112.00384521484375, 41.552703857421875], [-112.00446319580078, 41.56586456298828], [-112.0112075805664, 41.56586456298828], [-112.01121520996094, 41.57902526855469], [-112.01734924316406, 41.57902526855469], [-112.0173568725586, 41.594459533691406], [-112.02779388427734, 41.5940055847168], [-112.02779388427734, 41.60171890258789], [-112.03945922851562, 41.60126495361328], [-112.04007720947266, 41.608524322509766], [-112.04744720458984, 41.608524322509766], [-112.0474624633789, 41.62804412841797], [-112.05974578857422, 41.62758255004883], [-112.05975341796875, 41.640296936035156], [-112.050537109375, 41.64030075073242], [-112.05054473876953, 41.64983367919922], [-112.04132843017578, 41.64983367919922], [-112.04195404052734, 41.66299819946289], [-112.05793762207031, 41.662540435791016], [-112.0579605102539, 41.692047119140625], [-112.07394409179688, 41.692039489746094], [-112.07459259033203, 41.72381591796875], [-112.06167602539062, 41.72382354736328], [-112.0616683959961, 41.71383285522461], [-112.05490112304688, 41.713836669921875], [-112.04137420654297, 41.71384048461914], [-112.04138946533203, 41.7379035949707], [-112.0376968383789, 41.74108123779297], [-112.03339385986328, 41.741085052490234], 
[-112.02908325195312, 41.729736328125], [-112.02599334716797, 41.71657180786133], [-112.0241470336914, 41.71157455444336], [-112.0272216796875, 41.704769134521484], [-112.02413940429688, 41.70068359375], [-112.01676177978516, 41.69977951049805], [-112.01615142822266, 41.7070426940918], [-112.00508117675781, 41.707496643066406], [-112.00508117675781, 41.66618347167969], [-111.9792709350586, 41.6666374206543], [-111.9786605834961, 41.653926849365234], [-111.96821594238281, 41.65346908569336], [-111.96760559082031, 41.6407585144043], [-111.96146392822266, 41.6407585144043], [-111.96025085449219, 41.61125183105469], [-111.95042419433594, 41.61124801635742], [-111.94796752929688, 41.60988235473633], [-111.94735717773438, 41.60761260986328], [-111.9522705078125, 41.60443878173828], [-111.96455383300781, 41.60262680053711], [-111.9682388305664, 41.60398864746094], [-111.9725341796875, 41.60807418823242], [-111.97560119628906, 41.60943603515625], [-111.97928619384766, 41.61034393310547], [-111.98542785644531, 41.609439849853516], [-111.98481750488281, 41.58356475830078], [-111.97868347167969, 41.58356857299805], [-111.97745513916016, 41.570404052734375], [-111.97132110595703, 41.57085418701172], [-111.97132110595703, 41.56450271606445], [-111.98297882080078, 41.564048767089844], [-111.98175811767578, 41.54090118408203], [-111.98176574707031, 41.53545379638672], [-112.00323486328125, 41.53545379638672], [-112.00384521484375, 41.552703857421875]]]}, "type": "Feature", "id": "1", "properties": {"PERIMETER": 0.755827, "FEATURE2": null, "NAME": "Wellsville Mountain Wilderness", "FEATURE1": "Wilderness", "URL": "http://www.wilderness.net/index.cfm?fuse=NWPS&sec=wildView&wname=Wellsville%20Mountain", "AGBUR": "FS", "AREA": 0.0104441, "STATE_FIPS": "49", "WILDRNP020": 336, "STATE": "UT"}} Fiona-1.7.10/tests/data/test_gpx.gpx000066400000000000000000000046341317446052300173000ustar00rootroot00000000000000 11.0 10.9 10.7 10.5 10.4 10.2 10.0 10.0 10.0 10.2 10.4 10.5 10.5 10.1 9.6 9.1 
8.3 7.2 6.6 Fiona-1.7.10/tests/fixtures.py000066400000000000000000000007721317446052300162340ustar00rootroot00000000000000import os.path def read_file(name): return open(os.path.join(os.path.dirname(__file__), name)).read() # GeoJSON feature collection on a single line feature_collection = read_file('data/collection.txt') # Same as above but with pretty-print styling applied feature_collection_pp = read_file('data/collection-pp.txt') # One feature per line feature_seq = read_file('data/sequence.txt') # Same as above but each feature has pretty-print styling feature_seq_pp_rs = read_file('data/sequence-pp.txt') Fiona-1.7.10/tests/test_bigint.py000066400000000000000000000051641317446052300166760ustar00rootroot00000000000000import fiona import os import shutil import tempfile import unittest from fiona.ogrext import calc_gdal_version_num, get_gdal_version_num """ OGR 54bit handling: https://trac.osgeo.org/gdal/wiki/rfc31_ogr_64 Shapefile: OFTInteger fields are created by default with a width of 9 characters, so to be unambiguously read as OFTInteger (and if specifying integer that require 10 or 11 characters. the field is dynamically extended like managed since a few versions). OFTInteger64 fields are created by default with a width of 18 digits, so to be unambiguously read as OFTInteger64, and extented to 19 or 20 if needed. Integer fields of width between 10 and 18 will be read as OFTInteger64. Above they will be treated as OFTReal. In previous GDAL versions, Integer fields were created with a default with of 10, and thus will be now read as OFTInteger64. 
An open option, DETECT_TYPE=YES, can be specified so as OGR does a full scan of the DBF file to see if integer fields of size 10 or 11 hold 32 bit or 64 bit values and adjust the type accordingly (and same for integer fields of size 19 or 20, in case of overflow of 64 bit integer, OFTReal is chosen) """ class TestBigInt(unittest.TestCase): def setUp(self): self.tempdir = tempfile.mkdtemp() def tearDown(self): shutil.rmtree(self.tempdir) def testCreateBigIntSchema(self): name = os.path.join(self.tempdir, 'output1.shp') a_bigint = 10 ** 18 - 1 fieldname = 'abigint' kwargs = { 'driver': 'ESRI Shapefile', 'crs': 'EPSG:4326', 'schema': { 'geometry': 'Point', 'properties': [(fieldname, 'int:10')]}} if get_gdal_version_num() < calc_gdal_version_num(2, 0, 0): with self.assertRaises(OverflowError): with fiona.open(name, 'w', **kwargs) as dst: rec = {} rec['geometry'] = {'type': 'Point', 'coordinates': (0, 0)} rec['properties'] = {fieldname: a_bigint} dst.write(rec) else: with fiona.open(name, 'w', **kwargs) as dst: rec = {} rec['geometry'] = {'type': 'Point', 'coordinates': (0, 0)} rec['properties'] = {fieldname: a_bigint} dst.write(rec) with fiona.open(name) as src: if get_gdal_version_num() >= calc_gdal_version_num(2, 0, 0): first = next(iter(src)) self.assertEqual(first['properties'][fieldname], a_bigint) if __name__ == "__main__": # import sys;sys.argv = ['', 'Test.testName'] unittest.main() Fiona-1.7.10/tests/test_bounds.py000066400000000000000000000010421317446052300167030ustar00rootroot00000000000000import fiona def test_bounds_point(): g = {'type': 'Point', 'coordinates': [10, 10]} assert fiona.bounds(g) == (10, 10, 10, 10) def test_bounds_line(): g = {'type': 'LineString', 'coordinates': [[0, 0], [10, 10]]} assert fiona.bounds(g) == (0, 0, 10, 10) def test_bounds_polygon(): g = {'type': 'Polygon', 'coordinates': [[[0, 0], [10, 10], [10, 0]]]} assert fiona.bounds(g) == (0, 0, 10, 10) def test_bounds_z(): g = {'type': 'Point', 'coordinates': [10,10,10]} assert 
fiona.bounds(g) == (10, 10, 10, 10) Fiona-1.7.10/tests/test_bytescollection.py000066400000000000000000000172001317446052300206160ustar00rootroot00000000000000# Testing BytesCollection import sys import unittest import pytest import six import fiona FIXME_WINDOWS = sys.platform.startswith('win') class ReadingTest(unittest.TestCase): def setUp(self): with open('tests/data/coutwildrnp.json') as src: bytesbuf = src.read().encode('utf-8') self.c = fiona.BytesCollection(bytesbuf) def tearDown(self): self.c.close() @unittest.skipIf(six.PY2, 'string are bytes in Python 2') def test_construct_with_str(self): with open('tests/data/coutwildrnp.json') as src: strbuf = src.read() self.assertRaises(ValueError, fiona.BytesCollection, strbuf) @unittest.skipIf(FIXME_WINDOWS, reason="FIXME on Windows. Please look into why this test is not working.") def test_open_repr(self): # I'm skipping checking the name of the virtual file as it produced by uuid. self.assertTrue(repr(self.c).startswith(" 0) def test_mode(self): self.assertEqual(self.c.mode, 'r') @unittest.skipIf(FIXME_WINDOWS, reason="FIXME on Windows. Please look into why this test is not working.") def test_collection(self): self.assertEqual(self.c.encoding, 'utf-8') def test_iter(self): self.assertTrue(iter(self.c)) def test_closed_no_iter(self): self.c.close() self.assertRaises(ValueError, iter, self.c) def test_len(self): self.assertEqual(len(self.c), 67) def test_closed_len(self): # Len is lazy, it's never computed in this case. TODO? self.c.close() self.assertEqual(len(self.c), 0) def test_len_closed_len(self): # Lazy len is computed in this case and sticks. 
len(self.c) self.c.close() self.assertEqual(len(self.c), 67) def test_driver(self): self.assertEqual(self.c.driver, "GeoJSON") def test_closed_driver(self): self.c.close() self.assertEqual(self.c.driver, None) def test_driver_closed_driver(self): self.c.driver self.c.close() self.assertEqual(self.c.driver, "GeoJSON") def test_schema(self): s = self.c.schema['properties'] self.assertEqual(s['PERIMETER'], "float") self.assertEqual(s['NAME'], "str") self.assertEqual(s['URL'], "str") self.assertEqual(s['STATE_FIPS'], "str") self.assertEqual(s['WILDRNP020'], "int") def test_closed_schema(self): # Schema is lazy too, never computed in this case. TODO? self.c.close() self.assertEqual(self.c.schema, None) def test_schema_closed_schema(self): self.c.schema self.c.close() self.assertEqual( sorted(self.c.schema.keys()), ['geometry', 'properties']) def test_crs(self): crs = self.c.crs self.assertEqual(crs['init'], 'epsg:4326') def test_crs_wkt(self): crs = self.c.crs_wkt self.assertTrue(crs.startswith('GEOGCS["WGS 84"')) def test_closed_crs(self): # Crs is lazy too, never computed in this case. TODO? 
self.c.close() self.assertEqual(self.c.crs, None) def test_crs_closed_crs(self): self.c.crs self.c.close() self.assertEqual( sorted(self.c.crs.keys()), ['init']) def test_meta(self): self.assertEqual( sorted(self.c.meta.keys()), ['crs', 'crs_wkt', 'driver', 'schema']) def test_bounds(self): self.assertAlmostEqual(self.c.bounds[0], -113.564247, 6) self.assertAlmostEqual(self.c.bounds[1], 37.068981, 6) self.assertAlmostEqual(self.c.bounds[2], -104.970871, 6) self.assertAlmostEqual(self.c.bounds[3], 41.996277, 6) def test_iter_one(self): itr = iter(self.c) f = next(itr) self.assertEqual(f['id'], "0") self.assertEqual(f['properties']['STATE'], 'UT') def test_iter_list(self): f = list(self.c)[0] self.assertEqual(f['id'], "0") self.assertEqual(f['properties']['STATE'], 'UT') def test_re_iter_list(self): f = list(self.c)[0] # Run through iterator f = list(self.c)[0] # Run through a new, reset iterator self.assertEqual(f['id'], "0") self.assertEqual(f['properties']['STATE'], 'UT') def test_getitem_one(self): f = self.c[0] self.assertEqual(f['id'], "0") self.assertEqual(f['properties']['STATE'], 'UT') def test_no_write(self): self.assertRaises(IOError, self.c.write, {}) def test_iter_items_list(self): i, f = list(self.c.items())[0] self.assertEqual(i, 0) self.assertEqual(f['id'], "0") self.assertEqual(f['properties']['STATE'], 'UT') def test_iter_keys_list(self): i = list(self.c.keys())[0] self.assertEqual(i, 0) def test_in_keys(self): self.assertTrue(0 in self.c.keys()) self.assertTrue(0 in self.c) class FilterReadingTest(unittest.TestCase): def setUp(self): with open('tests/data/coutwildrnp.json') as src: bytesbuf = src.read().encode('utf-8') self.c = fiona.BytesCollection(bytesbuf) def tearDown(self): self.c.close() def test_filter_1(self): results = list(self.c.filter(bbox=(-120.0, 30.0, -100.0, 50.0))) self.assertEqual(len(results), 67) f = results[0] self.assertEqual(f['id'], "0") self.assertEqual(f['properties']['STATE'], 'UT') def test_filter_reset(self): results = 
list(self.c.filter(bbox=(-112.0, 38.0, -106.0, 40.0))) self.assertEqual(len(results), 26) results = list(self.c.filter()) self.assertEqual(len(results), 67) def test_filter_mask(self): mask = { 'type': 'Polygon', 'coordinates': ( ((-112, 38), (-112, 40), (-106, 40), (-106, 38), (-112, 38)),)} results = list(self.c.filter(mask=mask)) self.assertEqual(len(results), 26) def test_zipped_bytes_collection(): with open('tests/data/coutwildrnp.zip', 'rb') as src: zip_file_bytes = src.read() with fiona.BytesCollection(zip_file_bytes) as col: assert col.name == 'coutwildrnp' def test_grenada_bytes_geojson(): """Read grenada.geojson as BytesCollection. grenada.geojson is an example of geojson that GDAL's GeoJSON driver will fail to read successfully unless the file's extension reflects its json'ness. """ with open('tests/data/grenada.geojson', 'rb') as src: bytes_grenada_geojson = src.read() # We expect an exception if the GeoJSON driver isn't specified. with pytest.raises(fiona.errors.FionaValueError): with fiona.BytesCollection(bytes_grenada_geojson) as col: pass # If told what driver to use, we should be good. 
with fiona.BytesCollection(bytes_grenada_geojson, driver='GeoJSON') as col: assert len(col) == 1 Fiona-1.7.10/tests/test_collection.py000066400000000000000000000526221317446052300175560ustar00rootroot00000000000000# Testing collections and workspaces import datetime import logging import os import shutil import sys import subprocess import tempfile import unittest import fiona from fiona.collection import Collection, supported_drivers from fiona.errors import FionaValueError, DriverError FIXME_WINDOWS = sys.platform.startswith('win') WILDSHP = 'tests/data/coutwildrnp.shp' logging.basicConfig(stream=sys.stderr, level=logging.INFO) TEMPDIR = tempfile.gettempdir() class SupportedDriversTest(unittest.TestCase): def test_shapefile(self): self.assertTrue("ESRI Shapefile" in supported_drivers) self.assertEqual( set(supported_drivers["ESRI Shapefile"]), set("raw")) def test_map(self): self.assertTrue("MapInfo File" in supported_drivers) self.assertEqual( set(supported_drivers["MapInfo File"]), set("raw")) class CollectionArgsTest(unittest.TestCase): def test_path(self): self.assertRaises(TypeError, Collection, (0)) def test_mode(self): self.assertRaises(TypeError, Collection, ("foo"), mode=0) def test_driver(self): self.assertRaises(TypeError, Collection, ("foo"), mode='w', driver=1) def test_schema(self): self.assertRaises( TypeError, Collection, ("foo"), mode='w', driver="ESRI Shapefile", schema=1) def test_crs(self): self.assertRaises( TypeError, Collection, ("foo"), mode='w', driver="ESRI Shapefile", schema=0, crs=1) def test_encoding(self): self.assertRaises( TypeError, Collection, ("foo"), mode='r', encoding=1) def test_layer(self): self.assertRaises( TypeError, Collection, ("foo"), mode='r', layer=0.5) def test_vsi(self): self.assertRaises( TypeError, Collection, ("foo"), mode='r', vsi='git') def test_archive(self): self.assertRaises( TypeError, Collection, ("foo"), mode='r', archive=1) def test_write_numeric_layer(self): self.assertRaises(ValueError, Collection, 
("foo"), mode='w', layer=1) def test_write_geojson_layer(self): self.assertRaises(ValueError, Collection, ("foo"), mode='w', driver='GeoJSON', layer='foo') def test_append_geojson(self): self.assertRaises(ValueError, Collection, ("foo"), mode='w', driver='ARCGEN') class OpenExceptionTest(unittest.TestCase): def test_no_archive(self): self.assertRaises(IOError, fiona.open, ("/"), mode='r', vfs="zip:///foo.zip") class ReadingTest(unittest.TestCase): def setUp(self): self.c = fiona.open(WILDSHP, "r") def tearDown(self): self.c.close() def test_open_repr(self): self.assertEqual( repr(self.c), ("" % hex(id(self.c)))) def test_closed_repr(self): self.c.close() self.assertEqual( repr(self.c), ("" % hex(id(self.c)))) def test_path(self): self.assertEqual(self.c.path, WILDSHP) def test_name(self): self.assertEqual(self.c.name, 'coutwildrnp') def test_mode(self): self.assertEqual(self.c.mode, 'r') def test_collection(self): self.assertEqual(self.c.encoding, 'iso-8859-1') def test_iter(self): self.assertTrue(iter(self.c)) def test_closed_no_iter(self): self.c.close() self.assertRaises(ValueError, iter, self.c) def test_len(self): self.assertEqual(len(self.c), 67) def test_closed_len(self): # Len is lazy, it's never computed in this case. TODO? self.c.close() self.assertEqual(len(self.c), 0) def test_len_closed_len(self): # Lazy len is computed in this case and sticks. 
len(self.c) self.c.close() self.assertEqual(len(self.c), 67) def test_driver(self): self.assertEqual(self.c.driver, "ESRI Shapefile") def test_closed_driver(self): self.c.close() self.assertEqual(self.c.driver, None) def test_driver_closed_driver(self): self.c.driver self.c.close() self.assertEqual(self.c.driver, "ESRI Shapefile") def test_schema(self): s = self.c.schema['properties'] self.assertEqual(s['PERIMETER'], "float:24.15") self.assertEqual(s['NAME'], "str:80") self.assertEqual(s['URL'], "str:101") self.assertEqual(s['STATE_FIPS'], "str:80") self.assertEqual(s['WILDRNP020'], "int:10") def test_closed_schema(self): # Schema is lazy too, never computed in this case. TODO? self.c.close() self.assertEqual(self.c.schema, None) def test_schema_closed_schema(self): self.c.schema self.c.close() self.assertEqual( sorted(self.c.schema.keys()), ['geometry', 'properties']) def test_crs(self): crs = self.c.crs self.assertEqual(crs['init'], 'epsg:4326') def test_crs_wkt(self): crs = self.c.crs_wkt self.assertTrue(crs.startswith('GEOGCS["GCS_WGS_1984"')) def test_closed_crs(self): # Crs is lazy too, never computed in this case. TODO? 
self.c.close() self.assertEqual(self.c.crs, None) def test_crs_closed_crs(self): self.c.crs self.c.close() self.assertEqual( sorted(self.c.crs.keys()), ['init']) def test_meta(self): self.assertEqual( sorted(self.c.meta.keys()), ['crs', 'crs_wkt', 'driver', 'schema']) def test_profile(self): self.assertEqual( sorted(self.c.profile.keys()), ['crs', 'crs_wkt', 'driver', 'schema']) def test_bounds(self): self.assertAlmostEqual(self.c.bounds[0], -113.564247, 6) self.assertAlmostEqual(self.c.bounds[1], 37.068981, 6) self.assertAlmostEqual(self.c.bounds[2], -104.970871, 6) self.assertAlmostEqual(self.c.bounds[3], 41.996277, 6) def test_context(self): with fiona.open(WILDSHP, "r") as c: self.assertEqual(c.name, 'coutwildrnp') self.assertEqual(len(c), 67) self.assertEqual(c.closed, True) def test_iter_one(self): itr = iter(self.c) f = next(itr) self.assertEqual(f['id'], "0") self.assertEqual(f['properties']['STATE'], 'UT') def test_iter_list(self): f = list(self.c)[0] self.assertEqual(f['id'], "0") self.assertEqual(f['properties']['STATE'], 'UT') def test_re_iter_list(self): f = list(self.c)[0] # Run through iterator f = list(self.c)[0] # Run through a new, reset iterator self.assertEqual(f['id'], "0") self.assertEqual(f['properties']['STATE'], 'UT') def test_getitem_one(self): f = self.c[0] self.assertEqual(f['id'], "0") self.assertEqual(f['properties']['STATE'], 'UT') def test_getitem_iter_combo(self): i = iter(self.c) f = next(i) f = next(i) self.assertEqual(f['id'], "1") f = self.c[0] self.assertEqual(f['id'], "0") f = next(i) self.assertEqual(f['id'], "2") def test_no_write(self): self.assertRaises(IOError, self.c.write, {}) def test_iter_items_list(self): i, f = list(self.c.items())[0] self.assertEqual(i, 0) self.assertEqual(f['id'], "0") self.assertEqual(f['properties']['STATE'], 'UT') def test_iter_keys_list(self): i = list(self.c.keys())[0] self.assertEqual(i, 0) def test_in_keys(self): self.assertTrue(0 in self.c.keys()) self.assertTrue(0 in self.c) class 
FilterReadingTest(unittest.TestCase): def setUp(self): self.c = fiona.open(WILDSHP, "r") def tearDown(self): self.c.close() def test_filter_1(self): results = list(self.c.filter(bbox=(-120.0, 30.0, -100.0, 50.0))) self.assertEqual(len(results), 67) f = results[0] self.assertEqual(f['id'], "0") self.assertEqual(f['properties']['STATE'], 'UT') def test_filter_reset(self): results = list(self.c.filter(bbox=(-112.0, 38.0, -106.0, 40.0))) self.assertEqual(len(results), 26) results = list(self.c.filter()) self.assertEqual(len(results), 67) def test_filter_mask(self): mask = { 'type': 'Polygon', 'coordinates': ( ((-112, 38), (-112, 40), (-106, 40), (-106, 38), (-112, 38)),)} results = list(self.c.filter(mask=mask)) self.assertEqual(len(results), 26) class UnsupportedDriverTest(unittest.TestCase): def test_immediate_fail_driver(self): schema = { 'geometry': 'Point', 'properties': {'label': 'str', u'verit\xe9': 'int'}} self.assertRaises( DriverError, fiona.open, os.path.join(TEMPDIR, "foo"), "w", "Bogus", schema=schema) @unittest.skipIf(FIXME_WINDOWS, reason="FIXME on Windows. Please look into why this test isn't working. There is a codepage issue regarding Windows-1252 and UTF-8. 
") class GenericWritingTest(unittest.TestCase): @classmethod def setUpClass(self): self.tempdir = tempfile.mkdtemp() schema = { 'geometry': 'Point', 'properties': [('label', 'str'), (u'verit\xe9', 'int')]} self.c = fiona.open(os.path.join(self.tempdir, "test-no-iter.shp"), 'w', driver="ESRI Shapefile", schema=schema, encoding='Windows-1252') @classmethod def tearDownClass(self): self.c.close() shutil.rmtree(self.tempdir) def test_encoding(self): self.assertEqual(self.c.encoding, 'Windows-1252') def test_no_iter(self): self.assertRaises(IOError, iter, self.c) def test_no_filter(self): self.assertRaises(IOError, self.c.filter) class PointWritingTest(unittest.TestCase): def setUp(self): self.tempdir = tempfile.mkdtemp() self.filename = os.path.join(self.tempdir, "point_writing_test.shp") self.sink = fiona.open( self.filename, "w", driver="ESRI Shapefile", schema={ 'geometry': 'Point', 'properties': [('title', 'str'), ('date', 'date')]}, crs='epsg:4326', encoding='utf-8') def tearDown(self): self.sink.close() shutil.rmtree(self.tempdir) def test_cpg(self): """Requires GDAL 1.9""" self.sink.close() self.assertTrue(open(os.path.join( self.tempdir, "point_writing_test.cpg")).readline() == 'UTF-8') def test_write_one(self): self.assertEqual(len(self.sink), 0) self.assertEqual(self.sink.bounds, (0.0, 0.0, 0.0, 0.0)) f = { 'geometry': {'type': 'Point', 'coordinates': (0.0, 0.1)}, 'properties': {'title': 'point one', 'date': "2012-01-29"}} self.sink.writerecords([f]) self.assertEqual(len(self.sink), 1) self.assertEqual(self.sink.bounds, (0.0, 0.1, 0.0, 0.1)) self.sink.close() info = subprocess.check_output( ["ogrinfo", self.filename, "point_writing_test"]) self.assertTrue( 'date (Date) = 2012/01/29' in info.decode('utf-8'), info) def test_write_two(self): self.assertEqual(len(self.sink), 0) self.assertEqual(self.sink.bounds, (0.0, 0.0, 0.0, 0.0)) f1 = { 'geometry': {'type': 'Point', 'coordinates': (0.0, 0.1)}, 'properties': {'title': 'point one', 'date': "2012-01-29"}} f2 = { 
'geometry': {'type': 'Point', 'coordinates': (0.0, -0.1)}, 'properties': {'title': 'point two', 'date': "2012-01-29"}} self.sink.writerecords([f1, f2]) self.assertEqual(len(self.sink), 2) self.assertEqual(self.sink.bounds, (0.0, -0.1, 0.0, 0.1)) def test_write_one_null_geom(self): self.assertEqual(len(self.sink), 0) self.assertEqual(self.sink.bounds, (0.0, 0.0, 0.0, 0.0)) f = { 'geometry': None, 'properties': {'title': 'point one', 'date': "2012-01-29"}} self.sink.writerecords([f]) self.assertEqual(len(self.sink), 1) self.assertEqual(self.sink.bounds, (0.0, 0.0, 0.0, 0.0)) def test_validate_record(self): fvalid = { 'geometry': {'type': 'Point', 'coordinates': (0.0, 0.1)}, 'properties': {'title': 'point one', 'date': "2012-01-29"}} finvalid = { 'geometry': {'type': 'Point', 'coordinates': (0.0, -0.1)}, 'properties': {'not-a-title': 'point two', 'date': "2012-01-29"}} self.assertTrue(self.sink.validate_record(fvalid)) self.assertFalse(self.sink.validate_record(finvalid)) class LineWritingTest(unittest.TestCase): def setUp(self): self.tempdir = tempfile.mkdtemp() self.sink = fiona.open( os.path.join(self.tempdir, "line_writing_test.shp"), "w", driver="ESRI Shapefile", schema={ 'geometry': 'LineString', 'properties': [('title', 'str'), ('date', 'date')]}, crs={'init': "epsg:4326", 'no_defs': True}) def tearDown(self): self.sink.close() shutil.rmtree(self.tempdir) def test_write_one(self): self.assertEqual(len(self.sink), 0) self.assertEqual(self.sink.bounds, (0.0, 0.0, 0.0, 0.0)) f = { 'geometry': {'type': 'LineString', 'coordinates': [(0.0, 0.1), (0.0, 0.2)]}, 'properties': {'title': 'line one', 'date': "2012-01-29"}} self.sink.writerecords([f]) self.assertEqual(len(self.sink), 1) self.assertEqual(self.sink.bounds, (0.0, 0.1, 0.0, 0.2)) def test_write_two(self): self.assertEqual(len(self.sink), 0) self.assertEqual(self.sink.bounds, (0.0, 0.0, 0.0, 0.0)) f1 = { 'geometry': {'type': 'LineString', 'coordinates': [(0.0, 0.1), (0.0, 0.2)]}, 'properties': {'title': 'line 
one', 'date': "2012-01-29"}} f2 = { 'geometry': {'type': 'MultiLineString', 'coordinates': [[(0.0, 0.0), (0.0, -0.1)], [(0.0, -0.1), (0.0, -0.2)]]}, 'properties': {'title': 'line two', 'date': "2012-01-29"}} self.sink.writerecords([f1, f2]) self.assertEqual(len(self.sink), 2) self.assertEqual(self.sink.bounds, (0.0, -0.2, 0.0, 0.2)) class PointAppendTest(unittest.TestCase): def setUp(self): self.tempdir = tempfile.mkdtemp() with fiona.open(WILDSHP, "r") as input: output_schema = input.schema.copy() output_schema['geometry'] = '3D Point' with fiona.open( os.path.join(self.tempdir, "test_append_point.shp"), 'w', crs=None, driver="ESRI Shapefile", schema=output_schema) as output: for f in input: f['geometry'] = { 'type': 'Point', 'coordinates': f['geometry']['coordinates'][0][0]} output.write(f) def tearDown(self): shutil.rmtree(self.tempdir) def test_append_point(self): with fiona.open(os.path.join(self.tempdir, "test_append_point.shp"), "a") as c: self.assertEqual(c.schema['geometry'], 'Point') c.write({'geometry': {'type': 'Point', 'coordinates': (0.0, 45.0)}, 'properties': {'PERIMETER': 1.0, 'FEATURE2': None, 'NAME': 'Foo', 'FEATURE1': None, 'URL': 'http://example.com', 'AGBUR': 'BAR', 'AREA': 0.0, 'STATE_FIPS': 1, 'WILDRNP020': 1, 'STATE': 'XL'}}) self.assertEqual(len(c), 68) class LineAppendTest(unittest.TestCase): def setUp(self): self.tempdir = tempfile.mkdtemp() with fiona.open( os.path.join(self.tempdir, "test_append_line.shp"), "w", driver="ESRI Shapefile", schema={ 'geometry': 'MultiLineString', 'properties': {'title': 'str', 'date': 'date'}}, crs={'init': "epsg:4326", 'no_defs': True}) as output: f = {'geometry': {'type': 'MultiLineString', 'coordinates': [[(0.0, 0.1), (0.0, 0.2)]]}, 'properties': {'title': 'line one', 'date': "2012-01-29"}} output.writerecords([f]) def tearDown(self): shutil.rmtree(self.tempdir) def test_append_line(self): with fiona.open(os.path.join(self.tempdir, "test_append_line.shp"), "a") as c: 
self.assertEqual(c.schema['geometry'], 'LineString') f1 = { 'geometry': {'type': 'LineString', 'coordinates': [(0.0, 0.1), (0.0, 0.2)]}, 'properties': {'title': 'line one', 'date': "2012-01-29"}} f2 = { 'geometry': {'type': 'MultiLineString', 'coordinates': [[(0.0, 0.0), (0.0, -0.1)], [(0.0, -0.1), (0.0, -0.2)]]}, 'properties': {'title': 'line two', 'date': "2012-01-29"}} c.writerecords([f1, f2]) self.assertEqual(len(c), 3) self.assertEqual(c.bounds, (0.0, -0.2, 0.0, 0.2)) class ShapefileFieldWidthTest(unittest.TestCase): def test_text(self): self.tempdir = tempfile.mkdtemp() with fiona.open( os.path.join(self.tempdir, "textfield.shp"), 'w', schema={'geometry': 'Point', 'properties': {'text': 'str:254'}}, driver="ESRI Shapefile") as c: c.write( {'geometry': {'type': 'Point', 'coordinates': (0.0, 45.0)}, 'properties': {'text': 'a' * 254}}) c = fiona.open(os.path.join(self.tempdir, "textfield.shp"), "r") self.assertEqual(c.schema['properties']['text'], 'str:254') f = next(iter(c)) self.assertEqual(f['properties']['text'], 'a' * 254) c.close() def tearDown(self): shutil.rmtree(self.tempdir) class CollectionTest(unittest.TestCase): def test_invalid_mode(self): self.assertRaises(ValueError, fiona.open, os.path.join(TEMPDIR, "bogus.shp"), "r+") def test_w_args(self): self.assertRaises(FionaValueError, fiona.open, os.path.join(TEMPDIR, "test-no-iter.shp"), "w") self.assertRaises( FionaValueError, fiona.open, os.path.join(TEMPDIR, "test-no-iter.shp"), "w", "Driver") def test_no_path(self): self.assertRaises(IOError, fiona.open, "no-path.shp", "a") def test_no_read_conn_str(self): self.assertRaises(IOError, fiona.open, "PG:dbname=databasename", "r") @unittest.skipIf(sys.platform.startswith("win"), reason="test only for *nix based system") def test_no_read_directory(self): self.assertRaises(ValueError, fiona.open, "/dev/null", "r") class GeoJSONCRSWritingTest(unittest.TestCase): def setUp(self): self.tempdir = tempfile.mkdtemp() self.filename = os.path.join(self.tempdir, 
"crs_writing_test.json") self.sink = fiona.open( self.filename, "w", driver="GeoJSON", schema={ 'geometry': 'Point', 'properties': [('title', 'str'), ('date', 'date')]}, crs={'a': 6370997, 'lon_0': -100, 'y_0': 0, 'no_defs': True, 'proj': 'laea', 'x_0': 0, 'units': 'm', 'b': 6370997, 'lat_0': 45}) def tearDown(self): self.sink.close() shutil.rmtree(self.tempdir) @unittest.skipIf(FIXME_WINDOWS, reason="FIXME on Windows. Test raises PermissionError. Please look into why this test isn't working.") class DateTimeTest(unittest.TestCase): def setUp(self): self.tempdir = tempfile.mkdtemp() def test_date(self): self.sink = fiona.open( os.path.join(self.tempdir, "date_test.shp"), "w", driver="ESRI Shapefile", schema={ 'geometry': 'Point', 'properties': [('id', 'int'), ('date', 'date')]}, crs={'init': "epsg:4326", 'no_defs': True}) recs = [{ 'geometry': {'type': 'Point', 'coordinates': (7.0, 50.0)}, 'properties': {'id': 1, 'date': '2013-02-25'} }, { 'geometry': {'type': 'Point', 'coordinates': (7.0, 50.2)}, 'properties': {'id': 1, 'date': datetime.date(2014, 2, 3)} }] self.sink.writerecords(recs) self.sink.close() self.assertEqual(len(self.sink), 2) with fiona.open(os.path.join(self.tempdir, "date_test.shp"), "r") as c: self.assertEqual(len(c), 2) rf1, rf2 = list(c) self.assertEqual(rf1['properties']['date'], '2013-02-25') self.assertEqual(rf2['properties']['date'], '2014-02-03') def tearDown(self): shutil.rmtree(self.tempdir) Fiona-1.7.10/tests/test_collection_crs.py000066400000000000000000000012741317446052300204220ustar00rootroot00000000000000import os import tempfile import fiona import fiona.crs def test_collection_crs_wkt(): with fiona.open('tests/data/coutwildrnp.shp') as src: assert src.crs_wkt.startswith( 'GEOGCS["GCS_WGS_1984",DATUM["WGS_1984",SPHEROID["WGS_84"') def test_collection_no_crs_wkt(): """crs members of a dataset with no crs can be accessed safely.""" tmpdir = tempfile.gettempdir() filename = os.path.join(tmpdir, 'test.shp') with 
fiona.open('tests/data/coutwildrnp.shp') as src: profile = src.meta del profile['crs'] del profile['crs_wkt'] with fiona.open(filename, 'w', **profile) as dst: assert dst.crs_wkt == "" assert dst.crs == {} Fiona-1.7.10/tests/test_crs.py000066400000000000000000000070001317446052300162000ustar00rootroot00000000000000from fiona import crs, _crs def test_proj_keys(): assert len(crs.all_proj_keys) == 87 assert 'init' in crs.all_proj_keys assert 'proj' in crs.all_proj_keys assert 'no_mayo' in crs.all_proj_keys def test_from_string(): # A PROJ.4 string with extra whitespace. val = crs.from_string( " +proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs +foo ") assert len(val.items()) == 4 assert val['proj'] == 'longlat' assert val['ellps'] == 'WGS84' assert val['datum'] == 'WGS84' assert val['no_defs'] assert 'foo' not in val def test_from_string_utm(): # A PROJ.4 string with extra whitespace and integer UTM zone. val = crs.from_string( " +proj=utm +zone=13 +ellps=WGS84 +foo ") assert len(val.items()) == 3 assert val['proj'] == 'utm' assert val['ellps'] == 'WGS84' assert val['zone'] == 13 assert 'foo' not in val def test_to_string(): # Make a string from a mapping with a few bogus items val = { 'proj': 'longlat', 'ellps': 'WGS84', 'datum': 'WGS84', 'no_defs': True, 'foo': True, 'axis': False, 'belgium': [1, 2]} assert crs.to_string( val) == "+datum=WGS84 +ellps=WGS84 +no_defs +proj=longlat" def test_to_string_utm(): # Make a string from a mapping with a few bogus items val = { 'proj': 'utm', 'ellps': 'WGS84', 'zone': 13, 'no_defs': True, 'foo': True, 'axis': False, 'belgium': [1, 2]} assert crs.to_string( val) == "+ellps=WGS84 +no_defs +proj=utm +zone=13" def test_to_string_epsg(): val = {'init': 'epsg:4326', 'no_defs': True} assert crs.to_string(val) == "+init=epsg:4326 +no_defs" def test_to_string_zeroval(): # Make a string with some 0 values (e.g. 
esri:102017) val = {'proj': 'laea', 'lat_0': 90, 'lon_0': 0, 'x_0': 0, 'y_0': 0, 'ellps': 'WGS84', 'datum': 'WGS84', 'units': 'm', 'no_defs': True} assert crs.to_string(val) == ( "+datum=WGS84 +ellps=WGS84 +lat_0=90 +lon_0=0 +no_defs +proj=laea " "+units=m +x_0=0 +y_0=0") def test_from_epsg(): val = crs.from_epsg(4326) assert val['init'] == "epsg:4326" assert val['no_defs'] def test_from_epsg_neg(): try: crs.from_epsg(-1) except ValueError: pass except: raise def test_to_string_unicode(): # See issue #83. val = crs.to_string({ u'units': u'm', u'no_defs': True, u'datum': u'NAD83', u'proj': u'utm', u'zone': 16}) assert 'NAD83' in val def test_wktext(): """Test +wktext parameter is preserved.""" proj4 = ('+proj=merc +a=6378137 +b=6378137 +lat_ts=0.0 +lon_0=0.0 ' '+x_0=0.0 +y_0=0 +k=1.0 +units=m +nadgrids=@null +wktext ' '+no_defs') assert 'wktext' in crs.from_string(proj4) def test_towgs84(): """+towgs84 is preserved""" proj4 = ('+proj=lcc +lat_1=49 +lat_2=46 +lat_0=47.5 ' '+lon_0=13.33333333333333 +x_0=400000 +y_0=400000 +ellps=bessel ' '+towgs84=577.326,90.129,463.919,5.137,1.474,5.297,2.4232 ' '+units=m +wktext +no_defs') assert 'towgs84' in crs.from_string(proj4) def test_towgs84_wkt(): """+towgs84 +wktext are preserved in WKT""" proj4 = ('+proj=lcc +lat_1=49 +lat_2=46 +lat_0=47.5 ' '+lon_0=13.33333333333333 +x_0=400000 +y_0=400000 +ellps=bessel ' '+towgs84=577.326,90.129,463.919,5.137,1.474,5.297,2.4232 ' '+units=m +wktext +no_defs') assert 'towgs84' in _crs.crs_to_wkt(proj4) assert 'wktext' in _crs.crs_to_wkt(proj4) Fiona-1.7.10/tests/test_drivers.py000066400000000000000000000020151317446052300170700ustar00rootroot00000000000000import logging import os.path import shutil import sys import tempfile import unittest import fiona logging.basicConfig(stream=sys.stderr, level=logging.DEBUG) FIXME_WINDOWS = sys.platform.startswith('win') @unittest.skipIf(FIXME_WINDOWS, reason="FIXME on Windows. 
Raises PermissionError Please look into why this test isn't working.") def test_options(tmpdir=None): """Test that setting CPL_DEBUG=ON works""" if tmpdir is None: tempdir = tempfile.mkdtemp() logfile = os.path.join(tempdir, 'example.log') else: logfile = str(tmpdir.join('example.log')) logger = logging.getLogger('Fiona') logger.setLevel(logging.DEBUG) fh = logging.FileHandler(logfile) fh.setLevel(logging.DEBUG) logger.addHandler(fh) with fiona.drivers(CPL_DEBUG=True): c = fiona.open("tests/data/coutwildrnp.shp") c.close() log = open(logfile).read() assert "Option CPL_DEBUG" in log if tempdir and tmpdir is None: shutil.rmtree(tempdir) Fiona-1.7.10/tests/test_feature.py000066400000000000000000000104571317446052300170560ustar00rootroot00000000000000# testing features, to be called by nosetests import logging import os import shutil import sys import tempfile import unittest from fiona import collection from fiona.collection import Collection from fiona.ogrext import featureRT #logging.basicConfig(stream=sys.stderr, level=logging.INFO) class PointRoundTripTest(unittest.TestCase): def setUp(self): self.tempdir = tempfile.mkdtemp() schema = {'geometry': 'Point', 'properties': {'title': 'str'}} self.c = Collection(os.path.join(self.tempdir, "foo.shp"), "w", driver="ESRI Shapefile", schema=schema) def tearDown(self): self.c.close() shutil.rmtree(self.tempdir) def test_geometry(self): f = { 'id': '1', 'geometry': {'type': 'Point', 'coordinates': (0.0, 0.0)}, 'properties': {'title': u'foo'} } g = featureRT(f, self.c) self.assertEqual( sorted(g['geometry'].items()), [('coordinates', (0.0, 0.0)), ('type', 'Point')]) def test_properties(self): f = { 'id': '1', 'geometry': {'type': 'Point', 'coordinates': (0.0, 0.0)}, 'properties': {'title': u'foo'} } g = featureRT(f, self.c) self.assertEqual(g['properties']['title'], 'foo') def test_none_property(self): f = { 'id': '1', 'geometry': {'type': 'Point', 'coordinates': (0.0, 0.0)}, 'properties': {'title': None} } g = featureRT(f, 
self.c) self.assertEqual(g['properties']['title'], None) class LineStringRoundTripTest(unittest.TestCase): def setUp(self): self.tempdir = tempfile.mkdtemp() schema = {'geometry': 'LineString', 'properties': {'title': 'str'}} self.c = Collection(os.path.join(self.tempdir, "foo.shp"), "w", "ESRI Shapefile", schema=schema) def tearDown(self): self.c.close() shutil.rmtree(self.tempdir) def test_geometry(self): f = { 'id': '1', 'geometry': { 'type': 'LineString', 'coordinates': [(0.0, 0.0), (1.0, 1.0)] }, 'properties': {'title': u'foo'} } g = featureRT(f, self.c) self.assertEqual( sorted(g['geometry'].items()), [('coordinates', [(0.0, 0.0), (1.0, 1.0)]), ('type', 'LineString')]) def test_properties(self): f = { 'id': '1', 'geometry': {'type': 'Point', 'coordinates': (0.0, 0.0)}, 'properties': {'title': u'foo'} } g = featureRT(f, self.c) self.assertEqual(g['properties']['title'], 'foo') class PolygonRoundTripTest(unittest.TestCase): def setUp(self): self.tempdir = tempfile.mkdtemp() schema = {'geometry': 'Polygon', 'properties': {'title': 'str'}} self.c = Collection(os.path.join(self.tempdir, "foo.shp"), "w", "ESRI Shapefile", schema=schema) def tearDown(self): self.c.close() shutil.rmtree(self.tempdir) def test_geometry(self): f = { 'id': '1', 'geometry': { 'type': 'Polygon', 'coordinates': [[(0.0, 0.0), (0.0, 1.0), (1.0, 1.0), (1.0, 0.0), (0.0, 0.0)]] }, 'properties': {'title': u'foo'} } g = featureRT(f, self.c) self.assertEqual( sorted(g['geometry'].items()), [('coordinates', [[(0.0, 0.0), (0.0, 1.0), (1.0, 1.0), (1.0, 0.0), (0.0, 0.0)]]), ('type', 'Polygon')]) def test_properties(self): f = { 'id': '1', 'geometry': { 'type': 'Polygon', 'coordinates': [[(0.0, 0.0), (0.0, 1.0), (1.0, 1.0), (1.0, 0.0), (0.0, 0.0)]] }, 'properties': {'title': u'foo'} } g = featureRT(f, self.c) self.assertEqual(g['properties']['title'], 'foo') Fiona-1.7.10/tests/test_fio_bounds.py000066400000000000000000000060071317446052300175460ustar00rootroot00000000000000import json import re import 
click from click.testing import CliRunner from fiona.fio import bounds from .fixtures import ( feature_collection, feature_collection_pp, feature_seq, feature_seq_pp_rs) def test_fail(): runner = CliRunner() result = runner.invoke(bounds.bounds, [], '5') assert result.exit_code == 1 def test_seq(): runner = CliRunner() result = runner.invoke(bounds.bounds, [], feature_seq) assert result.exit_code == 0 assert result.output.count('[') == result.output.count(']') == 2 assert len(re.findall(r'\d*\.\d*', result.output)) == 8 def test_seq_rs(): runner = CliRunner() result = runner.invoke(bounds.bounds, [], feature_seq_pp_rs) assert result.exit_code == 0 assert result.output.count('[') == result.output.count(']') == 2 assert len(re.findall(r'\d*\.\d*', result.output)) == 8 def test_precision(): runner = CliRunner() result = runner.invoke(bounds.bounds, ['--precision', 1], feature_seq) assert result.exit_code == 0 assert result.output.count('[') == result.output.count(']') == 2 assert len(re.findall(r'\d*\.\d{1}\D', result.output)) == 8 def test_explode(): runner = CliRunner() result = runner.invoke(bounds.bounds, ['--explode'], feature_collection) assert result.exit_code == 0 assert result.output.count('[') == result.output.count(']') == 2 assert len(re.findall(r'\d*\.\d*', result.output)) == 8 def test_explode_pp(): runner = CliRunner() result = runner.invoke(bounds.bounds, ['--explode'], feature_collection_pp) assert result.exit_code == 0 assert result.output.count('[') == result.output.count(']') == 2 assert len(re.findall(r'\d*\.\d*', result.output)) == 8 def test_with_id(): runner = CliRunner() result = runner.invoke(bounds.bounds, ['--with-id'], feature_seq) assert result.exit_code == 0 assert result.output.count('id') == result.output.count('bbox') == 2 def test_explode_with_id(): runner = CliRunner() result = runner.invoke( bounds.bounds, ['--explode', '--with-id'], feature_collection) assert result.exit_code == 0 assert result.output.count('id') == 
result.output.count('bbox') == 2 def test_with_obj(): runner = CliRunner() result = runner.invoke(bounds.bounds, ['--with-obj'], feature_seq) assert result.exit_code == 0 assert result.output.count('geometry') == result.output.count('bbox') == 2 def test_bounds_explode_with_obj(): runner = CliRunner() result = runner.invoke( bounds.bounds, ['--explode', '--with-obj'], feature_collection) assert result.exit_code == 0 assert result.output.count('geometry') == result.output.count('bbox') == 2 def test_explode_output_rs(): runner = CliRunner() result = runner.invoke(bounds.bounds, ['--explode', '--rs'], feature_collection) assert result.exit_code == 0 assert result.output.count(u'\u001e') == 2 assert result.output.count('[') == result.output.count(']') == 2 assert len(re.findall(r'\d*\.\d*', result.output)) == 8 Fiona-1.7.10/tests/test_fio_calc.py000066400000000000000000000036621317446052300171620ustar00rootroot00000000000000from __future__ import division import json from click.testing import CliRunner from fiona.fio.calc import calc from .fixtures import feature_seq def test_fail(): runner = CliRunner() result = runner.invoke(calc, ["TEST", "f.properties.test > 5"], '{"type": "no_properties"}') assert result.exit_code == 1 def _load(output): features = [] for x in output.splitlines(): try: features.append(json.loads(x)) except: pass # nosetests puts some debugging garbage to stdout return features def test_calc_seq(): runner = CliRunner() result = runner.invoke(calc, ["TEST", "f.properties.AREA / f.properties.PERIMETER"], feature_seq) assert result.exit_code == 0 feats = _load(result.output) assert len(feats) == 2 for feat in feats: assert feat['properties']['TEST'] == \ feat['properties']['AREA'] / feat['properties']['PERIMETER'] def test_bool_seq(): runner = CliRunner() result = runner.invoke(calc, ["TEST", "f.properties.AREA > 0.015"], feature_seq) assert result.exit_code == 0 feats = _load(result.output) assert len(feats) == 2 assert 
feats[0]['properties']['TEST'] == True assert feats[1]['properties']['TEST'] == False def test_existing_property(): runner = CliRunner() result = runner.invoke(calc, ["AREA", "f.properties.AREA * 2"], feature_seq) assert result.exit_code == 1 result = runner.invoke(calc, ["--overwrite", "AREA", "f.properties.AREA * 2"], feature_seq) assert result.exit_code == 0 feats = _load(result.output) assert len(feats) == 2 for feat in feats: assert 'AREA' in feat['properties'] Fiona-1.7.10/tests/test_fio_cat.py000066400000000000000000000046231317446052300170250ustar00rootroot00000000000000import json import sys import unittest from click.testing import CliRunner from fiona.fio import cat from .fixtures import feature_seq from .fixtures import feature_seq_pp_rs WILDSHP = 'tests/data/coutwildrnp.shp' FIXME_WINDOWS = sys.platform.startswith('win') @unittest.skipIf(FIXME_WINDOWS, reason="FIXME on Windows. Please look into why this test is not working.") def test_one(): runner = CliRunner() result = runner.invoke(cat.cat, [WILDSHP]) assert result.exit_code == 0 assert result.output.count('"Feature"') == 67 @unittest.skipIf(FIXME_WINDOWS, reason="FIXME on Windows. Please look into why this test is not working.") def test_two(): runner = CliRunner() result = runner.invoke(cat.cat, [WILDSHP, WILDSHP]) assert result.exit_code == 0 assert result.output.count('"Feature"') == 134 @unittest.skipIf(FIXME_WINDOWS, reason="FIXME on Windows. Please look into why this test is not working.") def test_bbox_no(): runner = CliRunner() result = runner.invoke( cat.cat, [WILDSHP, '--bbox', '0,10,80,20'], catch_exceptions=False) assert result.exit_code == 0 assert result.output == "" @unittest.skipIf(FIXME_WINDOWS, reason="FIXME on Windows. 
Please look into why this test is not working.") def test_bbox_yes(): runner = CliRunner() result = runner.invoke( cat.cat, [WILDSHP, '--bbox', '-109,37,-107,39'], catch_exceptions=False) assert result.exit_code == 0 assert result.output.count('"Feature"') == 19 @unittest.skipIf(FIXME_WINDOWS, reason="FIXME on Windows. Please look into why this test is not working.") def test_bbox_json_yes(): runner = CliRunner() result = runner.invoke( cat.cat, [WILDSHP, '--bbox', '[-109,37,-107,39]'], catch_exceptions=False) assert result.exit_code == 0 assert result.output.count('"Feature"') == 19 def test_multi_layer(): layerdef = "1:coutwildrnp,1:coutwildrnp" runner = CliRunner() result = runner.invoke( cat.cat, ['--layer', layerdef, 'tests/data/']) assert result.output.count('"Feature"') == 134 def test_multi_layer_fail(): runner = CliRunner() result = runner.invoke(cat.cat, ['--layer', '200000:coutlildrnp', 'tests/data']) assert result.exit_code != 0 Fiona-1.7.10/tests/test_fio_collect.py000066400000000000000000000055221317446052300177020ustar00rootroot00000000000000"""Unittests for $ fio collect""" import json import sys import unittest from click.testing import CliRunner from fiona.fio import collect from .fixtures import feature_seq from .fixtures import feature_seq_pp_rs FIXME_WINDOWS = sys.platform.startswith('win') @unittest.skipIf(FIXME_WINDOWS, reason="FIXME on Windows. Please look into why this test is not working.") def test_collect_rs(): runner = CliRunner() result = runner.invoke( collect.collect, ['--src-crs', 'EPSG:3857'], feature_seq_pp_rs, catch_exceptions=False) assert result.exit_code == 0 assert result.output.count('"Feature"') == 2 @unittest.skipIf(FIXME_WINDOWS, reason="FIXME on Windows. 
Please look into why this test is not working.") def test_collect_no_rs(): runner = CliRunner() result = runner.invoke( collect.collect, ['--src-crs', 'EPSG:3857'], feature_seq, catch_exceptions=False) assert result.exit_code == 0 assert result.output.count('"Feature"') == 2 def test_collect_ld(): runner = CliRunner() result = runner.invoke( collect.collect, ['--with-ld-context', '--add-ld-context-item', 'foo=bar'], feature_seq, catch_exceptions=False) assert result.exit_code == 0 assert '"@context": {' in result.output assert '"foo": "bar"' in result.output def test_collect_rec_buffered(): runner = CliRunner() result = runner.invoke(collect.collect, ['--record-buffered'], feature_seq) assert result.exit_code == 0 assert '"FeatureCollection"' in result.output def test_collect_noparse(): runner = CliRunner() result = runner.invoke( collect.collect, ['--no-parse'], feature_seq, catch_exceptions=False) assert result.exit_code == 0 assert result.output.count('"Feature"') == 2 assert len(json.loads(result.output)['features']) == 2 def test_collect_noparse_records(): runner = CliRunner() result = runner.invoke( collect.collect, ['--no-parse', '--record-buffered'], feature_seq, catch_exceptions=False) assert result.exit_code == 0 assert result.output.count('"Feature"') == 2 assert len(json.loads(result.output)['features']) == 2 def test_collect_src_crs(): runner = CliRunner() result = runner.invoke( collect.collect, ['--no-parse', '--src-crs', 'epsg:4326'], feature_seq, catch_exceptions=False) assert result.exit_code == 2 def test_collect_noparse_rs(): runner = CliRunner() result = runner.invoke( collect.collect, ['--no-parse'], feature_seq_pp_rs, catch_exceptions=False) assert result.exit_code == 0 assert result.output.count('"Feature"') == 2 assert len(json.loads(result.output)['features']) == 2 Fiona-1.7.10/tests/test_fio_distrib.py000066400000000000000000000011201317446052300177030ustar00rootroot00000000000000"""Unittests for $ fio distrib""" from click.testing import 
CliRunner from fiona.fio import distrib from .fixtures import feature_collection from .fixtures import feature_collection_pp def test_distrib(): runner = CliRunner() result = runner.invoke(distrib.distrib, [], feature_collection_pp) assert result.exit_code == 0 assert result.output.count('"Feature"') == 2 def test_distrib_no_rs(): runner = CliRunner() result = runner.invoke(distrib.distrib, [], feature_collection) assert result.exit_code == 0 assert result.output.count('"Feature"') == 2 Fiona-1.7.10/tests/test_fio_dump.py000066400000000000000000000014371317446052300172230ustar00rootroot00000000000000"""Unittests for $ fio dump""" import sys import unittest from click.testing import CliRunner from fiona.fio import dump WILDSHP = 'tests/data/coutwildrnp.shp' TESTGPX = 'tests/data/test_gpx.gpx' FIXME_WINDOWS = sys.platform.startswith('win') @unittest.skipIf(FIXME_WINDOWS, reason="FIXME on Windows. Please look into why this test is not working.") def test_dump(): runner = CliRunner() result = runner.invoke(dump.dump, [WILDSHP]) assert result.exit_code == 0 assert '"FeatureCollection"' in result.output def test_dump_layer(): for layer in ('routes', '1'): runner = CliRunner() result = runner.invoke(dump.dump, [TESTGPX, '--layer', layer]) assert result.exit_code == 0 assert '"FeatureCollection"' in result.output Fiona-1.7.10/tests/test_fio_filter.py000066400000000000000000000015501317446052300175370ustar00rootroot00000000000000from click.testing import CliRunner from fiona.fio import filter from .fixtures import feature_seq def test_fail(): runner = CliRunner() result = runner.invoke(filter.filter, ["f.properties.test > 5"], "{'type': 'no_properties'}") assert result.exit_code == 1 def test_seq(): runner = CliRunner() result = runner.invoke(filter.filter, ["f.properties.AREA > 0.01"], feature_seq) assert result.exit_code == 0 assert result.output.count('Feature') == 2 result = runner.invoke(filter.filter, ["f.properties.AREA > 0.015"], feature_seq) assert result.exit_code 
== 0 assert result.output.count('Feature') == 1 result = runner.invoke(filter.filter, ["f.properties.AREA > 0.02"], feature_seq) assert result.exit_code == 0 assert result.output.count('Feature') == 0 Fiona-1.7.10/tests/test_fio_info.py000066400000000000000000000060101317446052300172010ustar00rootroot00000000000000import json from pkg_resources import iter_entry_points import re import sys import unittest from click.testing import CliRunner from fiona.fio.main import main_group WILDSHP = 'tests/data/coutwildrnp.shp' FIXME_WINDOWS = sys.platform.startswith('win') @unittest.skipIf(FIXME_WINDOWS, reason="FIXME on Windows. Please look into why this test is not working.") def test_info_json(): runner = CliRunner() result = runner.invoke(main_group, ['info', WILDSHP]) assert result.exit_code == 0 assert '"count": 67' in result.output assert '"crs": "EPSG:4326"' in result.output assert '"driver": "ESRI Shapefile"' in result.output assert '"name": "coutwildrnp"' in result.output @unittest.skipIf(FIXME_WINDOWS, reason="FIXME on Windows. Please look into why this test is not working.") def test_info_count(): runner = CliRunner() result = runner.invoke(main_group, ['info', '--count', WILDSHP]) assert result.exit_code == 0 assert result.output == "67\n" @unittest.skipIf(FIXME_WINDOWS, reason="FIXME on Windows. 
Please look into why this test is not working.") def test_info_bounds(): runner = CliRunner() result = runner.invoke(main_group, ['info', '--bounds', WILDSHP]) assert result.exit_code == 0 assert len(re.findall(r'\d*\.\d*', result.output)) == 4 def test_all_registered(): # Make sure all the subcommands are actually registered to the main CLI group for ep in iter_entry_points('fiona.fio_commands'): assert ep.name in main_group.commands def _filter_info_warning(lines): """$ fio info can issue a RuntimeWarning, but click adds stderr to stdout so we have to filter it out before decoding JSON lines.""" lines = list(filter(lambda x: 'RuntimeWarning' not in x, lines)) return lines @unittest.skipIf(FIXME_WINDOWS, reason="FIXME on Windows. Please look into why this test is not working.") def test_info_no_count(): """Make sure we can still get a `$ fio info` report on datasources that do not support feature counting, AKA `len(collection)`. """ runner = CliRunner() result = runner.invoke(main_group, ['info', 'tests/data/test_gpx.gpx']) assert result.exit_code == 0 lines = _filter_info_warning(result.output.splitlines()) assert len(lines) == 1, "First line is warning & second is JSON. No more." assert json.loads(lines[0])['count'] is None @unittest.skipIf(FIXME_WINDOWS, reason="FIXME on Windows. Please look into why this test is not working.") def test_info_layer(): for layer in ('routes', '1'): runner = CliRunner() result = runner.invoke(main_group, [ 'info', 'tests/data/test_gpx.gpx', '--layer', layer]) print(result.output) assert result.exit_code == 0 lines = _filter_info_warning(result.output.splitlines()) assert len(lines) == 1, "1st line is warning & 2nd is JSON - no more." 
assert json.loads(lines[0])['name'] == 'routes' Fiona-1.7.10/tests/test_fio_load.py000066400000000000000000000130251317446052300171710ustar00rootroot00000000000000import json import os import shutil import sys import tempfile import unittest from click.testing import CliRunner import fiona from fiona.fio.main import main_group from .fixtures import ( feature_collection, feature_seq, feature_seq_pp_rs) FIXME_WINDOWS = sys.platform.startswith('win') def test_err(): runner = CliRunner() result = runner.invoke( main_group, ['load'], '', catch_exceptions=False) assert result.exit_code == 2 def test_exception(tmpdir=None): if tmpdir is None: tmpdir = tempfile.mkdtemp() tmpfile = os.path.join(tmpdir, 'test.shp') else: tmpfile = str(tmpdir.join('test.shp')) runner = CliRunner() result = runner.invoke( main_group, ['load', '-f', 'Shapefile', tmpfile], '42', catch_exceptions=False) assert result.exit_code == 1 @unittest.skipIf(FIXME_WINDOWS, reason="FIXME on Windows. Please look into why this test is not working.") def test_collection(tmpdir=None): if tmpdir is None: tmpdir = tempfile.mkdtemp() tmpfile = os.path.join(tmpdir, 'test.shp') else: tmpfile = str(tmpdir.join('test.shp')) runner = CliRunner() result = runner.invoke( main_group, ['load', '-f', 'Shapefile', tmpfile], feature_collection) assert result.exit_code == 0 assert len(fiona.open(tmpfile)) == 2 @unittest.skipIf(FIXME_WINDOWS, reason="FIXME on Windows. Please look into why this test is not working.") def test_seq_rs(tmpdir=None): if tmpdir is None: tmpdir = tempfile.mkdtemp() tmpfile = os.path.join(tmpdir, 'test.shp') else: tmpfile = str(tmpdir.join('test.shp')) runner = CliRunner() result = runner.invoke( main_group, ['load', '-f', 'Shapefile', tmpfile], feature_seq_pp_rs) assert result.exit_code == 0 assert len(fiona.open(tmpfile)) == 2 @unittest.skipIf(FIXME_WINDOWS, reason="FIXME on Windows. 
Please look into why this test is not working.") def test_seq_no_rs(tmpdir=None): if tmpdir is None: tmpdir = tempfile.mkdtemp() tmpfile = os.path.join(tmpdir, 'test.shp') else: tmpfile = str(tmpdir.join('test.shp')) runner = CliRunner() result = runner.invoke( main_group, ['load', '-f', 'Shapefile', '--sequence', tmpfile], feature_seq) assert result.exit_code == 0 assert len(fiona.open(tmpfile)) == 2 @unittest.skipIf(FIXME_WINDOWS, reason="FIXME on Windows. Please look into why this test is not working.") def test_dst_crs_default_to_src_crs(tmpdir=None): # When --dst-crs is not given default to --src-crs. if tmpdir is None: tmpdir = tempfile.mkdtemp() tmpfile = os.path.join(tmpdir, 'test.shp') else: tmpfile = str(tmpdir.join('test.shp')) runner = CliRunner() result = runner.invoke( main_group, [ 'load', '--src-crs', 'EPSG:32617', '-f', 'Shapefile', '--sequence', tmpfile ], feature_seq) assert result.exit_code == 0 with fiona.open(tmpfile) as src: assert src.crs == {'init': 'epsg:32617'} assert len(src) == len(feature_seq.splitlines()) @unittest.skipIf(FIXME_WINDOWS, reason="FIXME on Windows. Please look into why this test is not working.") def test_different_crs(tmpdir=None): if tmpdir is None: tmpdir = tempfile.mkdtemp() tmpfile = os.path.join(tmpdir, 'test.shp') else: tmpfile = str(tmpdir.join('test.shp')) runner = CliRunner() result = runner.invoke( main_group, [ 'load', '--src-crs', 'EPSG:32617', '--dst-crs', 'EPSG:32610', '-f', 'Shapefile', '--sequence', tmpfile ], feature_seq) assert result.exit_code == 0 with fiona.open(tmpfile) as src: assert src.crs == {'init': 'epsg:32610'} assert len(src) == len(feature_seq.splitlines()) @unittest.skipIf(FIXME_WINDOWS, reason="FIXME on Windows. 
Please look into why this test is not working.") def test_dst_crs_no_src(tmpdir=None): if tmpdir is None: tmpdir = tempfile.mkdtemp() tmpfile = os.path.join(tmpdir, 'test.shp') else: tmpfile = str(tmpdir.join('test.shp')) runner = CliRunner() result = runner.invoke( main_group, [ 'load', '--dst-crs', 'EPSG:32610', '-f', 'Shapefile', '--sequence', tmpfile ], feature_seq) assert result.exit_code == 0 with fiona.open(tmpfile) as src: assert src.crs == {'init': 'epsg:32610'} assert len(src) == len(feature_seq.splitlines()) @unittest.skipIf(FIXME_WINDOWS, reason="FIXME on Windows. Please look into why this test is not working.") def test_fio_load_layer(): tmpdir = tempfile.mkdtemp() try: feature = { 'type': 'Feature', 'properties': {'key': 'value'}, 'geometry': { 'type': 'Point', 'coordinates': (5.0, 39.0) } } sequence = os.linesep.join(map(json.dumps, [feature, feature])) runner = CliRunner() result = runner.invoke(main_group, [ 'load', tmpdir, '--driver', 'ESRI Shapefile', '--src-crs', 'EPSG:4236', '--layer', 'test_layer', '--sequence'], input=sequence) assert result.exit_code == 0 with fiona.open(tmpdir) as src: assert len(src) == 2 assert src.name == 'test_layer' assert src.schema['geometry'] == 'Point' finally: shutil.rmtree(tmpdir) Fiona-1.7.10/tests/test_fio_ls.py000066400000000000000000000034351317446052300166740ustar00rootroot00000000000000"""Unittests for `$ fio ls`""" import json import shutil import sys import tempfile import unittest from click.testing import CliRunner import fiona from fiona.fio.main import main_group FIXME_WINDOWS = sys.platform.startswith('win') @unittest.skipIf(FIXME_WINDOWS, reason="FIXME on Windows. Please look into why this test is not working.") def test_fio_ls_single_layer(): result = CliRunner().invoke(main_group, [ 'ls', 'tests/data/']) assert result.exit_code == 0 assert len(result.output.splitlines()) == 1 assert json.loads(result.output) == ['coutwildrnp'] @unittest.skipIf(FIXME_WINDOWS, reason="FIXME on Windows. 
Please look into why this test is not working.") def test_fio_ls_indent(): result = CliRunner().invoke(main_group, [ 'ls', '--indent', '4', 'tests/data/coutwildrnp.shp']) assert result.exit_code == 0 assert len(result.output.strip().splitlines()) == 3 assert json.loads(result.output) == ['coutwildrnp'] def test_fio_ls_multi_layer(): infile = 'tests/data/coutwildrnp.shp' outdir = tempfile.mkdtemp() try: # Copy test shapefile into new directory # Shapefile driver treats a directory of shapefiles as a single # multi-layer datasource layer_names = ['l1', 'l2'] for layer in layer_names: with fiona.open(infile) as src, \ fiona.open(outdir, 'w', layer=layer, **src.meta) as dst: for feat in src: dst.write(feat) # Run CLI test result = CliRunner().invoke(main_group, [ 'ls', outdir]) assert result.exit_code == 0 assert json.loads(result.output) == layer_names finally: shutil.rmtree(outdir) Fiona-1.7.10/tests/test_geojson.py000066400000000000000000000043731317446052300170670ustar00rootroot00000000000000 import logging import os import shutil import sys import tempfile import unittest import fiona from fiona.collection import supported_drivers from fiona.errors import FionaValueError, DriverError, SchemaError, CRSError # logging.basicConfig(stream=sys.stderr, level=logging.DEBUG) log = logging.getLogger(__name__) class ReadingTest(unittest.TestCase): def setUp(self): self.c = fiona.open('tests/data/coutwildrnp.json', 'r') def tearDown(self): self.c.close() def test_json(self): self.assertEqual(len(self.c), 67) class WritingTest(unittest.TestCase): def setUp(self): self.tempdir = tempfile.mkdtemp() def tearDown(self): shutil.rmtree(self.tempdir) def test_json(self): path = os.path.join(self.tempdir, 'foo.json') with fiona.open(path, 'w', driver='GeoJSON', schema={'geometry': 'Unknown', 'properties': [('title', 'str')]}) as c: c.writerecords([{ 'geometry': {'type': 'Point', 'coordinates': [0.0, 0.0]}, 'properties': {'title': 'One'}}]) c.writerecords([{ 'geometry': {'type': 
'MultiPoint', 'coordinates': [[0.0, 0.0]]}, 'properties': {'title': 'Two'}}]) with fiona.open(path) as c: self.assertEqual(c.schema['geometry'], 'Unknown') self.assertEqual(len(c), 2) def test_json_overwrite(self): path = os.path.join(self.tempdir, 'foo.json') with fiona.drivers(), fiona.open(path, 'w', driver='GeoJSON', schema={'geometry': 'Unknown', 'properties': [('title', 'str')]}) as c: c.writerecords([{ 'geometry': {'type': 'Point', 'coordinates': [0.0, 0.0]}, 'properties': {'title': 'One'}}]) c.writerecords([{ 'geometry': {'type': 'MultiPoint', 'coordinates': [[0.0, 0.0]]}, 'properties': {'title': 'Two'}}]) # Overwrite should raise DriverIOError. try: with fiona.drivers(), fiona.open(path, 'w', driver='GeoJSON', schema={'geometry': 'Unknown', 'properties': [('title', 'str')]}) as c: pass except IOError: pass Fiona-1.7.10/tests/test_geometry.py000066400000000000000000000176331317446052300172610ustar00rootroot00000000000000# testing geometry extension, to be called by nosetests import logging import sys import unittest from fiona._geometry import (GeomBuilder, geometryRT) from fiona.errors import UnsupportedGeometryTypeError logging.basicConfig(stream=sys.stderr, level=logging.INFO) def geometry_wkb(wkb): return GeomBuilder().build_wkb(wkb) class OGRBuilderExceptionsTest(unittest.TestCase): def test(self): geom = {'type': "Bogus", 'coordinates': None} self.assertRaises(ValueError, geometryRT, geom) # The round tripping tests are defined in this not to be run base class. # class RoundTripping(object): """Derive type specific classes from this.""" def test_type(self): self.assertEqual( geometryRT(self.geom)['type'], self.geom['type']) def test_coordinates(self): self.assertEqual( geometryRT(self.geom)['coordinates'], self.geom['coordinates']) # All these get their tests from the RoundTripping class. 
# class PointRoundTripTest(unittest.TestCase, RoundTripping): def setUp(self): self.geom = {'type': "Point", 'coordinates': (0.0, 0.0)} class LineStringRoundTripTest(unittest.TestCase, RoundTripping): def setUp(self): self.geom = { 'type': "LineString", 'coordinates': [(0.0, 0.0), (1.0, 1.0)]} class PolygonRoundTripTest1(unittest.TestCase, RoundTripping): """An explicitly closed polygon.""" def setUp(self): self.geom = { 'type': "Polygon", 'coordinates': [ [(0.0, 0.0), (0.0, 1.0), (1.0, 1.0), (1.0, 0.0), (0.0, 0.0)]]} class PolygonRoundTripTest2(unittest.TestCase, RoundTripping): """An implicitly closed polygon.""" def setUp(self): self.geom = { 'type': "Polygon", 'coordinates': [ [(0.0, 0.0), (0.0, 1.0), (1.0, 1.0), (1.0, 0.0)]]} def test_coordinates(self): self.assertEqual( [geometryRT(self.geom)['coordinates'][0][:-1]], self.geom['coordinates']) class MultiPointRoundTripTest(unittest.TestCase, RoundTripping): def setUp(self): self.geom = { 'type': "MultiPoint", 'coordinates': [(0.0, 0.0), (1.0, 1.0)]} class MultiLineStringRoundTripTest(unittest.TestCase, RoundTripping): def setUp(self): self.geom = { 'type': "MultiLineString", 'coordinates': [[(0.0, 0.0), (1.0, 1.0)]]} class MultiPolygonRoundTripTest1(unittest.TestCase, RoundTripping): def setUp(self): # This is an explicitly closed polygon. self.geom = { 'type': "MultiPolygon", 'coordinates': [[ [(0.0, 0.0), (0.0, 1.0), (1.0, 1.0), (1.0, 0.0), (0.0, 0.0)] ]]} class MultiPolygonRoundTripTest2(unittest.TestCase, RoundTripping): def setUp(self): # This is an implicitly closed polygon. 
self.geom = { 'type': "MultiPolygon", 'coordinates': [[[(0.0, 0.0), (0.0, 1.0), (1.0, 1.0), (1.0, 0.0)]]]} def test_coordinates(self): self.assertEqual( [[geometryRT(self.geom)['coordinates'][0][0][:-1]]], self.geom['coordinates']) class GeometryCollectionRoundTripTest(unittest.TestCase): def setUp(self): self.geom = { 'type': "GeometryCollection", 'geometries': [ {'type': "Point", 'coordinates': (0.0, 0.0)}, { 'type': "LineString", 'coordinates': [(0.0, 0.0), (1.0, 1.0)]}]} def test_len(self): result = geometryRT(self.geom) self.assertEqual(len(result['geometries']), 2) def test_type(self): result = geometryRT(self.geom) self.assertEqual( [g['type'] for g in result['geometries']], ['Point', 'LineString']) class PointTest(unittest.TestCase): def test_point(self): # Hex-encoded Point (0 0) try: wkb = bytes.fromhex("010100000000000000000000000000000000000000") except: wkb = "010100000000000000000000000000000000000000".decode('hex') geom = geometry_wkb(wkb) self.assertEqual(geom['type'], "Point") self.assertEqual(geom['coordinates'], (0.0, 0.0)) class LineStringTest(unittest.TestCase): def test_line(self): # Hex-encoded LineString (0 0, 1 1) try: wkb = bytes.fromhex("01020000000200000000000000000000000000000000000000000000000000f03f000000000000f03f") except: wkb = "01020000000200000000000000000000000000000000000000000000000000f03f000000000000f03f".decode('hex') geom = geometry_wkb(wkb) self.assertEqual(geom['type'], "LineString") self.assertEqual(geom['coordinates'], [(0.0, 0.0), (1.0, 1.0)]) class PolygonTest(unittest.TestCase): def test_polygon(self): # 1 x 1 box (0, 0, 1, 1) try: wkb = bytes.fromhex("01030000000100000005000000000000000000f03f0000000000000000000000000000f03f000000000000f03f0000000000000000000000000000f03f00000000000000000000000000000000000000000000f03f0000000000000000") except: wkb = 
"01030000000100000005000000000000000000f03f0000000000000000000000000000f03f000000000000f03f0000000000000000000000000000f03f00000000000000000000000000000000000000000000f03f0000000000000000".decode('hex') geom = geometry_wkb(wkb) self.assertEqual(geom['type'], "Polygon") self.assertEqual(len(geom['coordinates']), 1) self.assertEqual(len(geom['coordinates'][0]), 5) x, y = zip(*geom['coordinates'][0]) self.assertEqual(min(x), 0.0) self.assertEqual(min(y), 0.0) self.assertEqual(max(x), 1.0) self.assertEqual(max(y), 1.0) class MultiPointTest(unittest.TestCase): def test_multipoint(self): try: wkb = bytes.fromhex("0104000000020000000101000000000000000000000000000000000000000101000000000000000000f03f000000000000f03f") except: wkb = "0104000000020000000101000000000000000000000000000000000000000101000000000000000000f03f000000000000f03f".decode('hex') geom = geometry_wkb(wkb) self.assertEqual(geom['type'], "MultiPoint") self.assertEqual(geom['coordinates'], [(0.0, 0.0), (1.0, 1.0)]) class MultiLineStringTest(unittest.TestCase): def test_multilinestring(self): # Hex-encoded LineString (0 0, 1 1) try: wkb = bytes.fromhex("01050000000100000001020000000200000000000000000000000000000000000000000000000000f03f000000000000f03f") except: wkb = "01050000000100000001020000000200000000000000000000000000000000000000000000000000f03f000000000000f03f".decode('hex') geom = geometry_wkb(wkb) self.assertEqual(geom['type'], "MultiLineString") self.assertEqual(len(geom['coordinates']), 1) self.assertEqual(len(geom['coordinates'][0]), 2) self.assertEqual(geom['coordinates'][0], [(0.0, 0.0), (1.0, 1.0)]) class MultiPolygonTest(unittest.TestCase): def test_multipolygon(self): # [1 x 1 box (0, 0, 1, 1)] try: wkb = bytes.fromhex("01060000000100000001030000000100000005000000000000000000f03f0000000000000000000000000000f03f000000000000f03f0000000000000000000000000000f03f00000000000000000000000000000000000000000000f03f0000000000000000") except: wkb = 
"01060000000100000001030000000100000005000000000000000000f03f0000000000000000000000000000f03f000000000000f03f0000000000000000000000000000f03f00000000000000000000000000000000000000000000f03f0000000000000000".decode('hex') geom = geometry_wkb(wkb) self.assertEqual(geom['type'], "MultiPolygon") self.assertEqual(len(geom['coordinates']), 1) self.assertEqual(len(geom['coordinates'][0]), 1) self.assertEqual(len(geom['coordinates'][0][0]), 5) x, y = zip(*geom['coordinates'][0][0]) self.assertEqual(min(x), 0.0) self.assertEqual(min(y), 0.0) self.assertEqual(max(x), 1.0) self.assertEqual(max(y), 1.0) Fiona-1.7.10/tests/test_geopackage.py000066400000000000000000000052011317446052300175000ustar00rootroot00000000000000 import logging import os import os.path import shutil import sys import tempfile import unittest import pytest import fiona from fiona.collection import supported_drivers from fiona.errors import FionaValueError, DriverError, SchemaError, CRSError from fiona.ogrext import calc_gdal_version_num, get_gdal_version_num logging.basicConfig(stream=sys.stderr, level=logging.INFO) class ReadingTest(unittest.TestCase): def setUp(self): pass def tearDown(self): pass @pytest.mark.skipif(not os.path.exists('tests/data/coutwildrnp.gpkg'), reason="Requires geopackage fixture") def test_gpkg(self): if get_gdal_version_num() < calc_gdal_version_num(1, 11, 0): self.assertRaises(DriverError, fiona.open, 'tests/data/coutwildrnp.gpkg', 'r', driver="GPKG") else: with fiona.open('tests/data/coutwildrnp.gpkg', 'r', driver="GPKG") as c: self.assertEquals(len(c), 48) class WritingTest(unittest.TestCase): def setUp(self): self.tempdir = tempfile.mkdtemp() def tearDown(self): shutil.rmtree(self.tempdir) @pytest.mark.skipif(not os.path.exists('tests/data/coutwildrnp.gpkg'), reason="Requires geopackage fixture") def test_gpkg(self): schema = {'geometry': 'Point', 'properties': [('title', 'str')]} crs = { 'a': 6370997, 'lon_0': -100, 'y_0': 0, 'no_defs': True, 'proj': 'laea', 'x_0': 0, 
'units': 'm', 'b': 6370997, 'lat_0': 45} path = os.path.join(self.tempdir, 'foo.gpkg') if get_gdal_version_num() < calc_gdal_version_num(1, 11, 0): self.assertRaises(DriverError, fiona.open, path, 'w', driver='GPKG', schema=schema, crs=crs) else: with fiona.open(path, 'w', driver='GPKG', schema=schema, crs=crs) as c: c.writerecords([{ 'geometry': {'type': 'Point', 'coordinates': [0.0, 0.0]}, 'properties': {'title': 'One'}}]) c.writerecords([{ 'geometry': {'type': 'Point', 'coordinates': [2.0, 3.0]}, 'properties': {'title': 'Two'}}]) with fiona.open(path) as c: self.assertEquals(c.schema['geometry'], 'Point') self.assertEquals(len(c), 2) Fiona-1.7.10/tests/test_integration.py000066400000000000000000000027201317446052300177400ustar00rootroot00000000000000"""Unittests to verify Fiona is functioning properly with other software.""" import collections import os import shutil import tempfile import unittest import six import fiona class TestCRSNonDict(unittest.TestCase): @classmethod def setUpClass(self): self.tempdir = tempfile.mkdtemp() @classmethod def tearDownClass(self): shutil.rmtree(self.tempdir) def test_dict_subclass(self): """Rasterio now has a `CRS()` class that subclasses `collections.UserDict()`. Make sure we can receive it. `UserDict()` is a good class to test against because in Python 2 it is not a subclass of `collections.Mapping()`, so it provides an edge case. 
""" class CRS(six.moves.UserDict): pass outfile = os.path.join(self.tempdir, 'test_UserDict.geojson') profile = { 'crs': CRS(init='EPSG:4326'), 'driver': 'GeoJSON', 'schema': { 'geometry': 'Point', 'properties': {} } } with fiona.open(outfile, 'w', **profile) as dst: dst.write({ 'type': 'Feature', 'properties': {}, 'geometry': { 'type': 'Point', 'coordinates': (10, -10) } }) with fiona.open(outfile) as src: assert len(src) == 1 assert src.crs == {'init': 'epsg:4326'} Fiona-1.7.10/tests/test_layer.py000066400000000000000000000042111317446052300165260ustar00rootroot00000000000000import logging import os import shutil import sys import tempfile import unittest import fiona logging.basicConfig(stream=sys.stderr, level=logging.INFO) from .test_collection import ReadingTest def test_index_selection(): with fiona.open('tests/data/coutwildrnp.shp', 'r', layer=0) as c: assert len(c) == 67 class FileReadingTest(ReadingTest): def setUp(self): self.c = fiona.open('tests/data/coutwildrnp.shp', 'r', layer='coutwildrnp') def tearDown(self): self.c.close() def test_open_repr(self): self.assertEqual( repr(self.c), ("" % hex(id(self.c)))) def test_closed_repr(self): self.c.close() self.assertEqual( repr(self.c), ("" % hex(id(self.c)))) def test_name(self): self.assertEqual(self.c.name, 'coutwildrnp') class DirReadingTest(ReadingTest): def setUp(self): self.c = fiona.open("tests/data", "r", layer="coutwildrnp") def tearDown(self): self.c.close() def test_open_repr(self): self.assertEqual( repr(self.c), ("" % hex(id(self.c)))) def test_closed_repr(self): self.c.close() self.assertEqual( repr(self.c), ("" % hex(id(self.c)))) def test_name(self): self.assertEqual(self.c.name, 'coutwildrnp') def test_path(self): self.assertEqual(self.c.path, "tests/data") class InvalidLayerTest(unittest.TestCase): def test_invalid(self): self.assertRaises(ValueError, fiona.open, ("tests/data/coutwildrnp.shp"), layer="foo") def test_write_numeric_layer(self): self.assertRaises(ValueError, fiona.open, 
(os.path.join(tempfile.gettempdir(), "test-no-iter.shp")), mode='w', layer=0) Fiona-1.7.10/tests/test_listing.py000066400000000000000000000031351317446052300170670ustar00rootroot00000000000000import logging import os import shutil import sys import unittest import fiona import fiona.ogrext FIXME_WINDOWS = sys.platform.startswith("win") logging.basicConfig(stream=sys.stderr, level=logging.INFO) def test_single_file_private(): with fiona.drivers(): assert fiona.ogrext._listlayers('tests/data/coutwildrnp.shp') == ['coutwildrnp'] def test_single_file(): assert fiona.listlayers('tests/data/coutwildrnp.shp') == ['coutwildrnp'] def test_directory(): assert fiona.listlayers('tests/data') == ['coutwildrnp'] @unittest.skipIf(FIXME_WINDOWS, reason="FIXME on Windows. ValueError raised. Please look into why this test isn't working.") def test_directory_trailing_slash(): assert fiona.listlayers('tests/data/') == ['coutwildrnp'] def test_zip_path(): assert fiona.listlayers('zip://tests/data/coutwildrnp.zip') == ['coutwildrnp'] def test_zip_path_arch(): assert fiona.listlayers('/coutwildrnp.shp', vfs='zip://tests/data/coutwildrnp.zip') == ['coutwildrnp'] class ListLayersArgsTest(unittest.TestCase): def test_path(self): self.assertRaises(TypeError, fiona.listlayers, (1)) def test_vfs(self): self.assertRaises(TypeError, fiona.listlayers, ("/"), vfs=1) def test_path_ioerror(self): self.assertRaises(IOError, fiona.listlayers, ("foobar")) def test_parse_path(): assert fiona.parse_paths("zip://foo.zip") == ("foo.zip", "zip", None) def test_parse_path2(): assert fiona.parse_paths("foo") == ("foo", None, None) def test_parse_vfs(): assert fiona.parse_paths("/", "zip://foo.zip") == ("/", "zip", "foo.zip") Fiona-1.7.10/tests/test_multiconxn.py000066400000000000000000000110241317446052300176120ustar00rootroot00000000000000import logging import os import shutil import sys import tempfile import unittest import fiona from fiona.compat import OrderedDict logging.basicConfig(stream=sys.stderr, 
level=logging.INFO) FIXME_WINDOWS = sys.platform.startswith("win") class ReadAccess(unittest.TestCase): # To check that we'll be able to get multiple 'r' connections to layers # in a single file. def setUp(self): self.c = fiona.open("tests/data/coutwildrnp.shp", "r", layer="coutwildrnp") def tearDown(self): self.c.close() def test_meta(self): with fiona.open("tests/data/coutwildrnp.shp", "r", layer="coutwildrnp") as c2: self.assertEqual(len(self.c), len(c2)) self.assertEqual(sorted(self.c.schema.items()), sorted(c2.schema.items())) def test_meta(self): f1 = next(iter(self.c)) with fiona.open("tests/data/coutwildrnp.shp", "r", layer="coutwildrnp") as c2: f2 = next(iter(c2)) self.assertEqual(f1, f2) @unittest.skipIf(FIXME_WINDOWS, reason="FIXME on Windows. These tests raise PermissionErrors on Windows in Python 3.x (which doesn't exist in Python 2.7). Please look into why this test isn't working.") class ReadWriteAccess(unittest.TestCase): # To check that we'll be able to read from a file that we're # writing to. 
def setUp(self): self.tempdir = tempfile.mkdtemp() self.c = fiona.open( os.path.join(self.tempdir, "multi_write_test.shp"), "w", driver="ESRI Shapefile", schema={ 'geometry': 'Point', 'properties': [('title', 'str:80'), ('date', 'date')]}, crs={'init': "epsg:4326", 'no_defs': True}, encoding='utf-8') self.f = { 'type': 'Feature', 'geometry': {'type': 'Point', 'coordinates': (0.0, 0.1)}, 'properties': OrderedDict([('title', 'point one'), ('date', '2012-01-29')])} self.c.writerecords([self.f]) self.c.flush() def tearDown(self): self.c.close() shutil.rmtree(self.tempdir) def test_meta(self): c2 = fiona.open(os.path.join(self.tempdir, "multi_write_test.shp"), "r") self.assertEqual(len(self.c), len(c2)) self.assertEqual(sorted(self.c.schema.items()), sorted(c2.schema.items())) def test_read(self): c2 = fiona.open(os.path.join(self.tempdir, "multi_write_test.shp"), "r") f2 = next(iter(c2)) del f2['id'] self.assertEqual(self.f, f2) def test_read_after_close(self): c2 = fiona.open(os.path.join(self.tempdir, "multi_write_test.shp"), "r") self.c.close() f2 = next(iter(c2)) del f2['id'] self.assertEqual(self.f, f2) @unittest.skipIf(FIXME_WINDOWS, reason="FIXME on Windows. These tests raise PermissionErrors on Windows in Python 3.x (which doesn't exist in Python 2.7). 
Please look into why this test isn't working.") class LayerCreation(unittest.TestCase): def setUp(self): self.tempdir = tempfile.mkdtemp() self.dir = os.path.join(self.tempdir, 'layer_creation') if os.path.exists(self.dir): shutil.rmtree(self.dir) os.mkdir(self.dir) self.c = fiona.open( self.dir, 'w', layer='write_test', driver='ESRI Shapefile', schema={ 'geometry': 'Point', 'properties': [('title', 'str:80'), ('date', 'date')]}, crs={'init': "epsg:4326", 'no_defs': True}, encoding='utf-8') self.f = { 'type': 'Feature', 'geometry': {'type': 'Point', 'coordinates': (0.0, 0.1)}, 'properties': OrderedDict([('title', 'point one'), ('date', '2012-01-29')])} self.c.writerecords([self.f]) self.c.flush() def tearDown(self): self.c.close() shutil.rmtree(self.tempdir) def test_meta(self): c2 = fiona.open(os.path.join(self.dir, "write_test.shp"), "r") self.assertEqual(len(self.c), len(c2)) self.assertEqual(sorted(self.c.schema.items()), sorted(c2.schema.items())) def test_read(self): c2 = fiona.open(os.path.join(self.dir, "write_test.shp"), "r") f2 = next(iter(c2)) del f2['id'] self.assertEqual(self.f, f2) def test_read_after_close(self): c2 = fiona.open(os.path.join(self.dir, "write_test.shp"), "r") self.c.close() f2 = next(iter(c2)) del f2['id'] self.assertEqual(self.f, f2) Fiona-1.7.10/tests/test_non_counting_layer.py000066400000000000000000000015131317446052300213100ustar00rootroot00000000000000import unittest import fiona GPX_FILE = 'tests/data/test_gpx.gpx' class NonCountingLayerTest(unittest.TestCase): def setUp(self): self.c = fiona.open(GPX_FILE, "r", layer="track_points") def tearDown(self): self.c.close() def test_len_fail(self): with self.assertRaises(TypeError): len(self.c) def test_list(self): features = list(self.c) self.assertEqual(len(features), 19) def test_getitem(self): feature = self.c[2] def test_fail_getitem_negative_index(self): with self.assertRaises(IndexError): self.c[-1] def test_slice(self): features = self.c[2:5] self.assertEqual(len(features), 
3) def test_fail_slice_negative_index(self): with self.assertRaises(IndexError): self.c[2:-4] Fiona-1.7.10/tests/test_profile.py000066400000000000000000000015531317446052300170600ustar00rootroot00000000000000import os import tempfile import fiona def test_profile(): with fiona.open('tests/data/coutwildrnp.shp') as src: assert src.meta['crs_wkt'] == 'GEOGCS["GCS_WGS_1984",DATUM["WGS_1984",SPHEROID["WGS_84",6378137,298.257223563]],PRIMEM["Greenwich",0],UNIT["Degree",0.017453292519943295],AUTHORITY["EPSG","4326"]]' def test_profile_creation_wkt(): tmpdir = tempfile.mkdtemp() outfilename = os.path.join(tmpdir, 'test.shp') with fiona.open('tests/data/coutwildrnp.shp') as src: profile = src.meta profile['crs'] = 'bogus' with fiona.open(outfilename, 'w', **profile) as dst: assert dst.crs == {'init': 'epsg:4326'} assert dst.crs_wkt == 'GEOGCS["GCS_WGS_1984",DATUM["WGS_1984",SPHEROID["WGS_84",6378137,298.257223563]],PRIMEM["Greenwich",0],UNIT["Degree",0.017453292519943295],AUTHORITY["EPSG","4326"]]' Fiona-1.7.10/tests/test_props.py000066400000000000000000000107461317446052300165670ustar00rootroot00000000000000import json import os.path from six import text_type import tempfile import fiona from fiona import prop_type, prop_width from fiona.rfc3339 import FionaDateType def test_width_str(): assert prop_width('str:254') == 254 assert prop_width('str') == 80 def test_width_other(): assert prop_width('int') == None assert prop_width('float') == None assert prop_width('date') == None def test_types(): assert prop_type('str:254') == text_type assert prop_type('str') == text_type assert isinstance(0, prop_type('int')) assert isinstance(0.0, prop_type('float')) assert prop_type('date') == FionaDateType def test_read_json_object_properties(): """JSON object properties are properly serialized""" data = """ { "type": "FeatureCollection", "features": [ { "geometry": { "type": "Polygon", "coordinates": [ [ [ 87.33588, 43.53139 ], [ 87.33588, 45.66894 ], [ 90.27542, 45.66894 ], [ 
90.27542, 43.53139 ], [ 87.33588, 43.53139 ] ] ] }, "type": "Feature", "properties": { "upperLeftCoordinate": { "latitude": 45.66894, "longitude": 87.91166 }, "tricky": "{gotcha" } } ] } """ tmpdir = tempfile.mkdtemp() filename = os.path.join(tmpdir, 'test.json') with open(filename, 'w') as f: f.write(data) with fiona.open(filename) as src: ftr = next(iter(src)) props = ftr['properties'] assert props['upperLeftCoordinate']['latitude'] == 45.66894 assert props['upperLeftCoordinate']['longitude'] == 87.91166 assert props['tricky'] == "{gotcha" def test_write_json_object_properties(): """Python object properties are properly serialized""" data = """ { "type": "FeatureCollection", "features": [ { "geometry": { "type": "Polygon", "coordinates": [ [ [ 87.33588, 43.53139 ], [ 87.33588, 45.66894 ], [ 90.27542, 45.66894 ], [ 90.27542, 43.53139 ], [ 87.33588, 43.53139 ] ] ] }, "type": "Feature", "properties": { "upperLeftCoordinate": { "latitude": 45.66894, "longitude": 87.91166 }, "tricky": "{gotcha" } } ] } """ data = json.loads(data)['features'][0] tmpdir = tempfile.mkdtemp() filename = os.path.join(tmpdir, 'test.json') with fiona.open( filename, 'w', driver='GeoJSON', schema={ 'geometry': 'Polygon', 'properties': {'upperLeftCoordinate': 'str', 'tricky': 'str'}} ) as dst: dst.write(data) with fiona.open(filename) as src: ftr = next(iter(src)) props = ftr['properties'] assert props['upperLeftCoordinate']['latitude'] == 45.66894 assert props['upperLeftCoordinate']['longitude'] == 87.91166 assert props['tricky'] == "{gotcha" def test_json_prop_decode_non_geojson_driver(): feature = { "type": "Feature", "properties": { "ulc": { "latitude": 45.66894, "longitude": 87.91166 }, "tricky": "{gotcha" }, "geometry": { "type": "Point", "coordinates": [10, 15] } } meta = { 'crs': 'EPSG:4326', 'driver': 'ESRI Shapefile', 'schema': { 'geometry': 'Point', 'properties': { 'ulc': 'str:255', 'tricky': 'str:255' } } } tmpdir = tempfile.mkdtemp() filename = os.path.join(tmpdir, 'test.json') 
with fiona.open(filename, 'w', **meta) as dst: dst.write(feature) with fiona.open(filename) as src: actual = next(iter(src)) assert isinstance(actual['properties']['ulc'], text_type) a = json.loads(actual['properties']['ulc']) e = json.loads(actual['properties']['ulc']) assert e == a assert actual['properties']['tricky'].startswith('{') Fiona-1.7.10/tests/test_read_drivers.py000066400000000000000000000013501317446052300200640ustar00rootroot00000000000000import logging import sys import pytest import fiona from fiona.errors import FionaValueError logging.basicConfig(stream=sys.stderr, level=logging.INFO) def test_read_fail(): with pytest.raises(FionaValueError): fiona.open('tests/data/coutwildrnp.shp', driver='GeoJSON') with pytest.raises(FionaValueError): fiona.open('tests/data/coutwildrnp.shp', enabled_drivers=['GeoJSON']) def test_read(): with fiona.open( 'tests/data/coutwildrnp.shp', driver='ESRI Shapefile') as src: assert src.driver == 'ESRI Shapefile' with fiona.open( 'tests/data/coutwildrnp.shp', enabled_drivers=['GeoJSON', 'ESRI Shapefile']) as src: assert src.driver == 'ESRI Shapefile' Fiona-1.7.10/tests/test_remove.py000066400000000000000000000041411317446052300167110ustar00rootroot00000000000000import logging import sys import os import tempfile import pytest import fiona logging.basicConfig(stream=sys.stderr, level=logging.INFO) def create_sample_data(filename, driver): meta = { 'driver': driver, 'schema': { 'geometry': 'Point', 'properties': {} } } with fiona.open(filename, 'w', **meta) as dst: dst.write({ 'geometry': { 'type': 'Point', 'coordinates': (0, 0), }, 'properties': {}, }) assert(os.path.exists(filename)) def test_remove(tmpdir=None): if tmpdir is None: tmpdir = tempfile.mkdtemp() filename_shp = os.path.join(tmpdir, 'test.shp') create_sample_data(filename_shp, driver='ESRI Shapefile') fiona.remove(filename_shp, driver='ESRI Shapefile') assert(not os.path.exists(filename_shp)) with pytest.raises(RuntimeError): fiona.remove(filename_shp, 
driver='ESRI Shapefile') def test_remove_driver(tmpdir=None): if tmpdir is None: tmpdir = tempfile.mkdtemp() filename_shp = os.path.join(tmpdir, 'test.shp') filename_json = os.path.join(tmpdir, 'test.json') create_sample_data(filename_shp, driver='ESRI Shapefile') create_sample_data(filename_json, driver='GeoJSON') fiona.remove(filename_json, driver='GeoJSON') assert(not os.path.exists(filename_json)) assert(os.path.exists(filename_shp)) def test_remove_collection(tmpdir=None): if tmpdir is None: tmpdir = tempfile.mkdtemp() filename_shp = os.path.join(tmpdir, 'test.shp') create_sample_data(filename_shp, driver='ESRI Shapefile') collection = fiona.open(filename_shp, 'r') fiona.remove(collection) assert(not os.path.exists(filename_shp)) def test_remove_path_without_driver(tmpdir=None): if tmpdir is None: tmpdir = tempfile.mkdtemp() filename_shp = os.path.join(tmpdir, 'test.shp') create_sample_data(filename_shp, driver='ESRI Shapefile') with pytest.raises(Exception): fiona.remove(filename_shp) assert(os.path.exists(filename_shp)) Fiona-1.7.10/tests/test_revolvingdoor.py000066400000000000000000000014631317446052300203170ustar00rootroot00000000000000# Test of opening and closing and opening import logging import os.path import shutil import subprocess import sys import tempfile import unittest import fiona logging.basicConfig(stream=sys.stderr, level=logging.INFO) log = logging.getLogger('fiona.tests') class RevolvingDoorTest(unittest.TestCase): def setUp(self): self.tempdir = tempfile.mkdtemp() def tearDown(self): shutil.rmtree(self.tempdir) def test_write_revolving_door(self): with fiona.open('tests/data/coutwildrnp.shp') as src: meta = src.meta features = list(src) shpname = os.path.join(self.tempdir, 'foo.shp') with fiona.open(shpname, 'w', **meta) as dst: dst.writerecords(features) with fiona.open(shpname) as src: pass Fiona-1.7.10/tests/test_rfc3339.py000066400000000000000000000033471317446052300165170ustar00rootroot00000000000000# testing Fiona's RFC 3339 
support, to be called by nosetests import logging import re import sys import unittest from fiona.rfc3339 import parse_date, parse_datetime, parse_time from fiona.rfc3339 import group_accessor, pattern_date logging.basicConfig(stream=sys.stderr, level=logging.INFO) class DateParseTest(unittest.TestCase): def test_yyyymmdd(self): self.assertEqual( parse_date("2012-01-29"), (2012, 1, 29, 0, 0, 0, 0.0)) def test_error(self): self.assertRaises(ValueError, parse_date, ("xxx")) class TimeParseTest(unittest.TestCase): def test_hhmmss(self): self.assertEqual( parse_time("10:11:12"), (0, 0, 0, 10, 11, 12, 0.0)) def test_hhmm(self): self.assertEqual( parse_time("10:11"), (0, 0, 0, 10, 11, 0, 0.0)) def test_hhmmssff(self): self.assertEqual( parse_time("10:11:12.42"), (0, 0, 0, 10, 11, 12, 0.42*1000000.0)) def test_hhmmssz(self): self.assertEqual( parse_time("10:11:12Z"), (0, 0, 0, 10, 11, 12, 0.0)) def test_hhmmssoff(self): self.assertEqual( parse_time("10:11:12-01:00"), (0, 0, 0, 10, 11, 12, 0.0)) def test_error(self): self.assertRaises(ValueError, parse_time, ("xxx")) class DatetimeParseTest(unittest.TestCase): def test_yyyymmdd(self): self.assertEqual( parse_datetime("2012-01-29T10:11:12"), (2012, 1, 29, 10, 11, 12, 0.0)) def test_error(self): self.assertRaises(ValueError, parse_datetime, ("xxx")) def test_group_accessor_indexerror(): match = re.search(pattern_date, '2012-01-29') g = group_accessor(match) assert g.group(-1) == 0 assert g.group(6) == 0 Fiona-1.7.10/tests/test_schema.py000066400000000000000000000146321317446052300166620ustar00rootroot00000000000000import os import shutil import tempfile import unittest import fiona from fiona.errors import UnsupportedGeometryTypeError class SchemaOrder(unittest.TestCase): def setUp(self): self.tempdir = tempfile.mkdtemp() def tearDown(self): shutil.rmtree(self.tempdir) def test_schema_ordering_items(self): items = [('title', 'str:80'), ('date', 'date')] with fiona.open(os.path.join(self.tempdir, 'test_schema.shp'), 'w', 
driver="ESRI Shapefile", schema={ 'geometry': 'LineString', 'properties': items }) as c: self.assertEqual(list(c.schema['properties'].items()), items) with fiona.open(os.path.join(self.tempdir, 'test_schema.shp')) as c: self.assertEqual(list(c.schema['properties'].items()), items) class ShapefileSchema(unittest.TestCase): def setUp(self): self.tempdir = tempfile.mkdtemp() def tearDown(self): shutil.rmtree(self.tempdir) def test_schema(self): items = sorted({ 'AWATER10': 'float', 'CLASSFP10': 'str', 'ZipCodeType': 'str', 'EstimatedPopulation': 'float', 'LocationType': 'str', 'ALAND10': 'float', 'TotalWages': 'float', 'FUNCSTAT10': 'str', 'Long': 'float', 'City': 'str', 'TaxReturnsFiled': 'float', 'State': 'str', 'Location': 'str', 'GSrchCnt': 'float', 'INTPTLAT10': 'str', 'Lat': 'float', 'MTFCC10': 'str', 'Decommisioned': 'str', 'GEOID10': 'str', 'INTPTLON10': 'str'}.items()) with fiona.open(os.path.join(self.tempdir, 'test_schema.shp'), 'w', driver="ESRI Shapefile", schema={ 'geometry': 'Polygon', 'properties': items }) as c: self.assertEqual(list(c.schema['properties'].items()), items) c.write( {'geometry': {'coordinates': [[(-117.882442, 33.783633), (-117.882284, 33.783817), (-117.863348, 33.760016), (-117.863478, 33.760016), (-117.863869, 33.760017), (-117.864, 33.760017999999995), (-117.864239, 33.760019), (-117.876608, 33.755769), (-117.882886, 33.783114), (-117.882688, 33.783345), (-117.882639, 33.783401999999995), (-117.88259, 33.78346), (-117.882442, 33.783633)]], 'type': 'Polygon'}, 'id': '1', 'properties':{ 'ALAND10': 8819240.0, 'AWATER10': 309767.0, 'CLASSFP10': 'B5', 'City': 'SANTA ANA', 'Decommisioned': False, 'EstimatedPopulation': 27773.0, 'FUNCSTAT10': 'S', 'GEOID10': '92706', 'GSrchCnt': 0.0, 'INTPTLAT10': '+33.7653010', 'INTPTLON10': '-117.8819759', 'Lat': 33.759999999999998, 'Location': 'NA-US-CA-SANTA ANA', 'LocationType': 'PRIMARY', 'Long': -117.88, 'MTFCC10': 'G6350', 'State': 'CA', 'TaxReturnsFiled': 14635.0, 'TotalWages': 521280485.0, 
'ZipCodeType': 'STANDARD'}, 'type': 'Feature'} ) self.assertEqual(len(c), 1) with fiona.open(os.path.join(self.tempdir, 'test_schema.shp')) as c: self.assertEqual( list(c.schema['properties'].items()), sorted([('AWATER10', 'float:24.15'), ('CLASSFP10', 'str:80'), ('ZipCodeTyp', 'str:80'), ('EstimatedP', 'float:24.15'), ('LocationTy', 'str:80'), ('ALAND10', 'float:24.15'), ('INTPTLAT10', 'str:80'), ('FUNCSTAT10', 'str:80'), ('Long', 'float:24.15'), ('City', 'str:80'), ('TaxReturns', 'float:24.15'), ('State', 'str:80'), ('Location', 'str:80'), ('GSrchCnt', 'float:24.15'), ('TotalWages', 'float:24.15'), ('Lat', 'float:24.15'), ('MTFCC10', 'str:80'), ('INTPTLON10', 'str:80'), ('GEOID10', 'str:80'), ('Decommisio', 'str:80')]) ) f = next(iter(c)) self.assertEqual(f['properties']['EstimatedP'], 27773.0) class FieldTruncationTestCase(unittest.TestCase): def setUp(self): self.tempdir = tempfile.mkdtemp() def tearDown(self): shutil.rmtree(self.tempdir) def test_issue177(self): name = os.path.join(self.tempdir, 'output.shp') kwargs = { 'driver': 'ESRI Shapefile', 'crs': 'EPSG:4326', 'schema': { 'geometry': 'Point', 'properties': [('a_fieldname', 'float')]}} with fiona.open(name, 'w', **kwargs) as dst: rec = {} rec['geometry'] = {'type': 'Point', 'coordinates': (0, 0)} rec['properties'] = {'a_fieldname': 3.0} dst.write(rec) with fiona.open(name) as src: first = next(iter(src)) assert first['geometry'] == {'type': 'Point', 'coordinates': (0, 0)} assert first['properties']['a_fieldnam'] == 3.0 def test_unsupported_geometry_type(): tmpdir = tempfile.mkdtemp() tmpfile = os.path.join(tmpdir, 'test-test-geom.shp') profile = { 'driver': 'ESRI Shapefile', 'schema': { 'geometry': 'BOGUS', 'properties': {}}} try: fiona.open(tmpfile, 'w', **profile) except UnsupportedGeometryTypeError: assert True Fiona-1.7.10/tests/test_slice.py000066400000000000000000000037461317446052300165250ustar00rootroot00000000000000import logging import sys import fiona logging.basicConfig(stream=sys.stderr, 
level=logging.INFO) def test_collection_get(): with fiona.open('tests/data/coutwildrnp.shp') as src: result = src[5] assert result['id'] == '5' def test_collection_slice(): with fiona.open('tests/data/coutwildrnp.shp') as src: results = src[:5] assert isinstance(results, list) assert len(results) == 5 assert results[4]['id'] == '4' def test_collection_iterator_slice(): with fiona.open('tests/data/coutwildrnp.shp') as src: results = list(src.items(5)) assert len(results) == 5 k, v = results[4] assert k == 4 assert v['id'] == '4' def test_collection_iterator_next(): with fiona.open('tests/data/coutwildrnp.shp') as src: k, v = next(src.items(5, None)) assert k == 5 assert v['id'] == '5' def test_collection_iterator_items_slice(): with fiona.open('tests/data/coutwildrnp.shp') as src: l = len(src) items = list(src.items(0, 5)) assert len(items) == 5 items = list(src.items(1, 5)) assert len(items) == 4 items = list(src.items(-5, None)) assert len(items) == 5 items = list(src.items(-5, -1)) assert len(items) == 4 items = list(src.items(0, None)) assert len(items) == l items = list(src.items(5, None)) assert len(items) == (l - 5) items = list(src.items(5, None, -1)) assert len(items) == 6 items = list(src.items(5, None, -2)) assert len(items) == 3 items = list(src.items(4, None, -2)) assert len(items) == 3 items = list(src.items(-1, -5, -1)) assert len(items) == 4 items = list(src.items(-5, None, -1)) assert len(items) == (l - 5 + 1) def test_collection_iterator_keys_next(): with fiona.open('tests/data/coutwildrnp.shp') as src: k = next(src.keys(5, None)) assert k == 5 Fiona-1.7.10/tests/test_unicode.py000066400000000000000000000110001317446052300170320ustar00rootroot00000000000000# coding: utf-8 import logging import os import shutil import sys import tempfile import unittest import pytest import six import fiona logging.basicConfig(stream=sys.stderr, level=logging.INFO) FIXME_WINDOWS = sys.platform.startswith('win') class UnicodePathTest(unittest.TestCase): def 
setUp(self): tempdir = tempfile.mkdtemp() self.dir = os.path.join(tempdir, 'français') shutil.copytree('tests/data/', self.dir) def tearDown(self): shutil.rmtree(os.path.dirname(self.dir)) def test_unicode_path(self): path = self.dir + '/coutwildrnp.shp' if sys.version_info < (3,): path = path.decode('utf-8') with fiona.open(path) as c: assert len(c) == 67 def test_unicode_path_layer(self): path = self.dir layer = 'coutwildrnp' if sys.version_info < (3,): path = path.decode('utf-8') layer = layer.decode('utf-8') with fiona.open(path, layer=layer) as c: assert len(c) == 67 def test_utf8_path(self): path = self.dir + '/coutwildrnp.shp' if sys.version_info < (3,): with fiona.open(path) as c: assert len(c) == 67 @unittest.skipIf(FIXME_WINDOWS, reason="FIXME on Windows. Please look into why these tests are not working. Note: test_write_utf8 works.") class UnicodeStringFieldTest(unittest.TestCase): def setUp(self): self.tempdir = tempfile.mkdtemp() def tearDown(self): shutil.rmtree(self.tempdir) @pytest.mark.xfail(reason="OGR silently fails to convert strings") def test_write_mismatch(self): """TOFIX: OGR silently fails to convert strings""" # Details: # # If we tell OGR that we want a latin-1 encoded output file and # give it a feature with a unicode property that can't be converted # to latin-1, no error is raised and OGR just writes the utf-8 # encoded bytes to the output file. # # This might be shapefile specific. # # Consequences: no error on write, but there will be an error # on reading the data and expecting latin-1. 
schema = { 'geometry': 'Point', 'properties': {'label': 'str', 'num': 'int'}} with fiona.open(os.path.join(self.tempdir, "test-write-fail.shp"), 'w', driver="ESRI Shapefile", schema=schema, encoding='latin1') as c: c.writerecords([{ 'type': 'Feature', 'geometry': {'type': 'Point', 'coordinates': [0, 0]}, 'properties': { 'label': u'徐汇区', 'num': 0}}]) with fiona.open(os.path.join(self.tempdir), encoding='latin1') as c: f = next(iter(c)) # Next assert fails. self.assertEqual(f['properties']['label'], u'徐汇区') def test_write_utf8(self): schema = { 'geometry': 'Point', 'properties': {'label': 'str', u'verit\xe9': 'int'}} with fiona.open(os.path.join(self.tempdir, "test-write.shp"), "w", "ESRI Shapefile", schema=schema, encoding='utf-8') as c: c.writerecords([{ 'type': 'Feature', 'geometry': {'type': 'Point', 'coordinates': [0, 0]}, 'properties': { 'label': u'Ba\u2019kelalan', u'verit\xe9': 0}}]) with fiona.open(os.path.join(self.tempdir), encoding='utf-8') as c: f = next(iter(c)) self.assertEqual(f['properties']['label'], u'Ba\u2019kelalan') self.assertEqual(f['properties'][u'verit\xe9'], 0) def test_write_gb18030(self): """Can write a simplified Chinese shapefile""" schema = { 'geometry': 'Point', 'properties': {'label': 'str', 'num': 'int'}} with fiona.open(os.path.join(self.tempdir, "test-write-gb18030.shp"), 'w', driver="ESRI Shapefile", schema=schema, encoding='gb18030') as c: c.writerecords([{ 'type': 'Feature', 'geometry': {'type': 'Point', 'coordinates': [0, 0]}, 'properties': {'label': u'徐汇区', 'num': 0}}]) with fiona.open(os.path.join(self.tempdir), encoding='gb18030') as c: f = next(iter(c)) self.assertEqual(f['properties']['label'], u'徐汇区') self.assertEqual(f['properties']['num'], 0) Fiona-1.7.10/tests/test_vfs.py000066400000000000000000000073221317446052300162160ustar00rootroot00000000000000import logging import os import shutil import sys import unittest import fiona logging.basicConfig(stream=sys.stderr, level=logging.INFO) from .test_collection import 
ReadingTest class VsiReadingTest(ReadingTest): # There's a bug in GDAL 1.9.2 http://trac.osgeo.org/gdal/ticket/5093 # in which the VSI driver reports the wrong number of features. # I'm overriding ReadingTest's test_filter_1 with a function that # passes and creating a new method in this class that we can exclude # from the test runner at run time. def test_filter_vsi(self): results = list(self.c.filter(bbox=(-114.0, 35.0, -104, 45.0))) self.assertEqual(len(results), 67) f = results[0] self.assertEqual(f['id'], "0") self.assertEqual(f['properties']['STATE'], 'UT') class ZipReadingTest(VsiReadingTest): def setUp(self): self.c = fiona.open("zip://tests/data/coutwildrnp.zip", "r") def tearDown(self): self.c.close() def test_open_repr(self): self.assertEqual( repr(self.c), ("" % hex(id(self.c)))) def test_closed_repr(self): self.c.close() self.assertEqual( repr(self.c), ("" % hex(id(self.c)))) def test_path(self): self.assertEqual(self.c.path, '/vsizip/tests/data/coutwildrnp.zip') class ZipArchiveReadingTest(VsiReadingTest): def setUp(self): self.c = fiona.open("/coutwildrnp.shp", "r", vfs="zip://tests/data/coutwildrnp.zip") def tearDown(self): self.c.close() def test_open_repr(self): self.assertEqual( repr(self.c), ("" % hex(id(self.c)))) def test_closed_repr(self): self.c.close() self.assertEqual( repr(self.c), ("" % hex(id(self.c)))) def test_path(self): self.assertEqual(self.c.path, '/vsizip/tests/data/coutwildrnp.zip/coutwildrnp.shp') class ZipArchiveReadingTestAbsPath(ZipArchiveReadingTest): def setUp(self): self.c = fiona.open( "/coutwildrnp.shp", "r", vfs="zip://" + os.path.abspath("tests/data/coutwildrnp.zip")) def test_open_repr(self): self.assert_(repr(self.c).startswith("" % hex(id(self.c)))) def test_closed_repr(self): self.c.close() self.assertEqual( repr(self.c), ("" % hex(id(self.c)))) def test_path(self): self.assertEqual(self.c.path, '/vsitar/tests/data/coutwildrnp.tar/testing/coutwildrnp.shp')