pax_global_header00006660000000000000000000000064147347512250014524gustar00rootroot0000000000000052 comment=25743c9c5472f1d77a76607e3364418919d1f25e pygeofilter-0.3.1/000077500000000000000000000000001473475122500140565ustar00rootroot00000000000000pygeofilter-0.3.1/.github/000077500000000000000000000000001473475122500154165ustar00rootroot00000000000000pygeofilter-0.3.1/.github/workflows/000077500000000000000000000000001473475122500174535ustar00rootroot00000000000000pygeofilter-0.3.1/.github/workflows/main.yml000066400000000000000000000031401473475122500211200ustar00rootroot00000000000000name: build ⚙️ on: [ push, pull_request ] jobs: test: runs-on: ubuntu-20.04 strategy: matrix: python-version: ['3.8', '3.9', '3.10'] steps: - uses: actions/checkout@v2 - uses: actions/setup-python@v2 name: Setup Python ${{ matrix.python-version }} with: python-version: ${{ matrix.python-version }} cache: pip - name: Install requirements run: | sudo apt-get update sudo apt-get install -y binutils libproj-dev gdal-bin libgdal-dev libsqlite3-mod-spatialite spatialite-bin pip install -r requirements-test.txt pip install -r requirements-dev.txt pip install pygdal=="`gdal-config --version`.*" pip install . 
- name: Configure sysctl limits run: | sudo swapoff -a sudo sysctl -w vm.swappiness=1 sudo sysctl -w fs.file-max=262144 sudo sysctl -w vm.max_map_count=262144 - name: Install and run Elasticsearch 📦 uses: getong/elasticsearch-action@v1.2 with: elasticsearch version: '8.2.2' host port: 9200 container port: 9200 host node port: 9300 node port: 9300 discovery type: 'single-node' - name: Install and run OpenSearch 📦 uses: esmarkowski/opensearch-github-action@v1.0.0 with: version: 2.18.0 security-disabled: true port: 9209 - name: Run unit tests run: | pytest # - name: run pre-commit (code formatting, lint and type checking) # run: | # python -m pip install pre-commit # pre-commit run --all-files pygeofilter-0.3.1/.github/workflows/publish.yml000066400000000000000000000012221473475122500216410ustar00rootroot00000000000000name: publish on: push: tags: - release-* - "v*" jobs: publish: runs-on: ubuntu-20.04 steps: - uses: actions/checkout@v2 - uses: actions/setup-python@v2 name: Setup Python with: python-version: "3.x" - name: Install build dependency run: pip install wheel setuptools - name: Build package run: python setup.py sdist bdist_wheel --universal - name: Publish package if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags') uses: pypa/gh-action-pypi-publish@release/v1 with: user: __token__ password: ${{ secrets.PYPI_API_TOKEN }} pygeofilter-0.3.1/.github/workflows/release-please.yml000066400000000000000000000004061473475122500230650ustar00rootroot00000000000000on: push: branches: - main name: release-please jobs: release-please: runs-on: ubuntu-latest steps: - uses: googleapis/release-please-action@v4 with: token: ${{ secrets.PAT_WORKFLOW }} release-type: python pygeofilter-0.3.1/.gitignore000066400000000000000000000034411473475122500160500ustar00rootroot00000000000000# Byte-compiled / optimized / DLL files __pycache__/ *.py[cod] *$py.class # C extensions *.so # Distribution / packaging .Python build/ develop-eggs/ dist/ downloads/ eggs/ .eggs/ 
lib/ lib64/ parts/ sdist/ var/ wheels/ pip-wheel-metadata/ share/python-wheels/ *.egg-info/ .installed.cfg *.egg MANIFEST # PyInstaller # Usually these files are written by a python script from a template # before PyInstaller builds the exe, so as to inject date/other infos into it. *.manifest *.spec # Installer logs pip-log.txt pip-delete-this-directory.txt # Unit test / coverage reports htmlcov/ .tox/ .nox/ .coverage .coverage.* .cache nosetests.xml coverage.xml *.cover *.py,cover .hypothesis/ .pytest_cache/ # Translations *.mo *.pot # Django stuff: *.log local_settings.py db.sqlite3 db.sqlite3-journal # Flask stuff: instance/ .webassets-cache # Scrapy stuff: .scrapy # Sphinx documentation docs/_build/ # PyBuilder target/ # Jupyter Notebook .ipynb_checkpoints # IPython profile_default/ ipython_config.py # pyenv .python-version # pipenv # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. # However, in case of collaboration, if having platform-specific dependencies or dependencies # having no cross-platform support, pipenv may install dependencies that don't work, or not # install all needed dependencies. #Pipfile.lock # PEP 582; used by e.g. 
github.com/David-OConnor/pyflow __pypackages__/ # Celery stuff celerybeat-schedule celerybeat.pid # SageMath parsed files *.sage.py # Environments .env .venv env/ venv/ ENV/ env.bak/ venv.bak/ # Spyder project settings .spyderproject .spyproject # Rope project settings .ropeproject # mkdocs documentation /site # mypy .mypy_cache/ .dmypy.json dmypy.json # Pyre type checker .pyre/ .doctrees .vscode .idea pygeofilter-0.3.1/.pre-commit-config.yaml000066400000000000000000000014671473475122500203470ustar00rootroot00000000000000repos: - repo: https://github.com/psf/black rev: 24.4.2 hooks: - id: black language_version: python - repo: https://github.com/PyCQA/isort rev: 5.13.2 hooks: - id: isort language_version: python - repo: https://github.com/PyCQA/flake8 rev: 7.0.0 hooks: - id: flake8 language_version: python - repo: https://github.com/pre-commit/mirrors-mypy rev: v1.10.0 hooks: - id: mypy language_version: python args: [--install-types, --non-interactive] # N.B.: Mypy is... a bit fragile. 
# ref: https://github.com/python/mypy/issues/4008 # The issue is that we have too many evaluate.py or parser.py and mypy believe they are all the same # when run within pre-commit files: ^pygeofilter* pygeofilter-0.3.1/.readthedocs.yaml000066400000000000000000000006341473475122500173100ustar00rootroot00000000000000# .readthedocs.yaml # Read the Docs configuration file # See https://docs.readthedocs.io/en/stable/config-file/v2.html for details # Required version: 2 # Build documentation in the docs/ directory with Sphinx sphinx: configuration: docs/conf.py # Optionally set requirements required to build your docs python: install: - requirements: docs/requirements.txt - requirements: requirements-test.txt pygeofilter-0.3.1/CHANGELOG.md000066400000000000000000000152311473475122500156710ustar00rootroot00000000000000# Changelog ## [0.3.1](https://github.com/geopython/pygeofilter/compare/v0.3.0...v0.3.1) (2024-12-31) ### Bug Fixes * **CI:** using separate file for tracking version to help with release-please action ([1c28b7c](https://github.com/geopython/pygeofilter/commit/1c28b7c45415ecedabd01570b114902f1d8f9310)) ## [0.3.0](https://github.com/geopython/pygeofilter/compare/v0.2.4...v0.3.0) (2024-12-30) ### Features * add support for OpenSearch backend ([#111](https://github.com/geopython/pygeofilter/pull/111)) * Update lark ([#110](https://github.com/geopython/pygeofilter/pull/110)) ### Bug Fixes * Handle boolean in ecql like cql_text ([#108](https://github.com/geopython/pygeofilter/pull/108)) * Fix compatibility with i386 ([#107](https://github.com/geopython/pygeofilter/pull/107)) * add FES parser import shortcut as other filter languages ([#102](https://github.com/geopython/pygeofilter/pull/102)) ### Miscellaneous Chores * release 0.3.0 ([48de1f1](https://github.com/geopython/pygeofilter/commit/48de1f128c4956a99d6760487146636122e119a3)) ## [0.2.4](https://github.com/geopython/pygeofilter/compare/v0.2.3...v0.2.4) (2024-07-10) ### Bug Fixes * bumping version to 0.2.4 
([21bc095](https://github.com/geopython/pygeofilter/commit/21bc0957c84244b7d39dbe164f00d143d952c684)) ## [0.2.3](https://github.com/geopython/pygeofilter/compare/v0.2.2...v0.2.3) (2024-07-10) ### Bug Fixes * adding dependency for publishing packages ([249926e](https://github.com/geopython/pygeofilter/commit/249926ef2ebe264b616ce0f039a8b0e1b8626dda)) ## [0.2.2](https://github.com/geopython/pygeofilter/compare/v0.2.1...v0.2.2) (2024-07-10) ### Bug Fixes * [#85](https://github.com/geopython/pygeofilter/issues/85) ([2f1a38f](https://github.com/geopython/pygeofilter/commit/2f1a38f8bc9dfe2ebf5c318c6121d7f51029a9cf)) * Addresses [#95](https://github.com/geopython/pygeofilter/issues/95). ([d51dbb0](https://github.com/geopython/pygeofilter/commit/d51dbb0eb7a1066bd97b81cffe99da11ebf3cba4)) * Addresses [#95](https://github.com/geopython/pygeofilter/issues/95). ([2a51990](https://github.com/geopython/pygeofilter/commit/2a519904c4ac408fabb39459104efcc3e09f3a40)) * Bump pre-commit dependencies ([90f4aaa](https://github.com/geopython/pygeofilter/commit/90f4aaaafe873c69b0ccd91e897a9ff218ef5110)) * Bump pre-commit dependencies ([64f7f96](https://github.com/geopython/pygeofilter/commit/64f7f962476665d4ae4eed750099a6c887ad21ca)) * Bump pre-commit dependencies ([11f1f9a](https://github.com/geopython/pygeofilter/commit/11f1f9ab71811da758aa67b13aeb2f0cce7aaa10)) * Enable custom handling of undefined field attr in to_filter ([23f172c](https://github.com/geopython/pygeofilter/commit/23f172cf1dd1ddb19791a761f128b001e887b361)) * Enable custom handling of undefined field attr in to_filter ([f0c7e9f](https://github.com/geopython/pygeofilter/commit/f0c7e9f36d55d80e1d17917a627ae5547c80363c)) * Enable custom handling of undefined field attr in to_filter ([d829c6b](https://github.com/geopython/pygeofilter/commit/d829c6be5254a45689d8bcdb52b28b8a5ed3b5b2)) * Support prefixed attribute names in cql2-text and ecql parsing 
([dbe4e9e](https://github.com/geopython/pygeofilter/commit/dbe4e9e5c0c48698f312e1cc023a43ea78391f60)) * Support prefixed attribute names in cql2-text and ecql parsing ([5318c6b](https://github.com/geopython/pygeofilter/commit/5318c6bcf6e2620d39c8bc52fa13cc40e02274ac)) * Support prefixed attribute names in cql2-text and ecql parsing ([122a5a6](https://github.com/geopython/pygeofilter/commit/122a5a6c5ba746a51bf9eb36a5d9617201d19123)) * Updating release-please to v4 ([11757ec](https://github.com/geopython/pygeofilter/commit/11757eca4a7ba71fbca575636117b6eb8b3c9e53)) ### [0.2.1](https://www.github.com/geopython/pygeofilter/compare/v0.2.0...v0.2.1) (2023-02-16) ### Bug Fixes * dt naivety ([08fb5f5](https://www.github.com/geopython/pygeofilter/commit/08fb5f5f8b0a5ee39443a6233d558bbacadb5acb)) * order of date/datetime checking in native evaluator ([d37d7c8](https://www.github.com/geopython/pygeofilter/commit/d37d7c8cb483fdb9ff53ff9f871d5a8f85a227e1)) * pinning sqlalchemy to version < 2.0.0 ([6e67239](https://www.github.com/geopython/pygeofilter/commit/6e67239eb1af9a77599bbbc8cee211c9f906d95e)) * timezone handling for dates ([6c0e5c1](https://www.github.com/geopython/pygeofilter/commit/6c0e5c17ce5dde2dc541ccd6411c55d2a86e52ec)) ## [0.2.0](https://www.github.com/geopython/pygeofilter/compare/v0.1.2...v0.2.0) (2022-10-17) ### Features * adding initial elasticsearch implmentation ([2ccfa02](https://www.github.com/geopython/pygeofilter/commit/2ccfa02d5fcf1ee1f3be76f5cf375ace2556fa6c)) ### [0.1.2](https://www.github.com/geopython/pygeofilter/compare/v0.1.1...v0.1.2) (2022-04-21) ### Bug Fixes * Allowing intervals to actually contain subnodes ([83b7c63](https://www.github.com/geopython/pygeofilter/commit/83b7c63ad9233a9ed600f061d3b8e074291dcb8c)) ### [0.1.1](https://www.github.com/geopython/pygeofilter/compare/v0.1.0...v0.1.1) (2022-02-08) ### Bug Fixes * Fixing compatibility issues with Python 3.6 type checking 
([ad7ddd7](https://www.github.com/geopython/pygeofilter/commit/ad7ddd7a332f838fa284e1493f0d3cc15036ad95)) * Improved typing ([2272b3b](https://www.github.com/geopython/pygeofilter/commit/2272b3b9371ff90fe5cbc9b8f84cbf6bb5cca76a)) * Improving structure of CI for type checking ([fb755a3](https://www.github.com/geopython/pygeofilter/commit/fb755a3859baf3a07f57938da2259b5c3fb74575)) * Improving typing ([6c3584b](https://www.github.com/geopython/pygeofilter/commit/6c3584b3961fe90cc07f08f6cc8f2256112850f3)) * Improving typing on CQL2 JSON ([e0747aa](https://www.github.com/geopython/pygeofilter/commit/e0747aa2d0dbcaedd49bd9bcf30e702da68aaa37)) * more concise type checking ([87e46a2](https://www.github.com/geopython/pygeofilter/commit/87e46a2c325fb5f1c1c92408369efdf263f387db)) * mypy dependency installation (using --non-interactive) ([84a1175](https://www.github.com/geopython/pygeofilter/commit/84a11752c48773650a063a767eb97a1fa149b0ac)) * Split up Django spatial filters ([484e0b3](https://www.github.com/geopython/pygeofilter/commit/484e0b3db483db76b6456593a33ee8598f72813d)) ## [0.1.0](https://www.github.com/geopython/pygeofilter/compare/v0.1.0...v0.1.0) (2021-11-18) ### Features * Fixing release-please package name ([2b666fc](https://www.github.com/geopython/pygeofilter/commit/2b666fc5b09c2ff15fa954f035a342542aa3577f)) ### Miscellaneous Chores * release 0.1.0 ([d5e4971](https://www.github.com/geopython/pygeofilter/commit/d5e49718f7f2c7936649217b286ebad42b168a23)) ## 0.1.0 (2021-11-18) ### Features * Merge pull request [#34](https://www.github.com/geopython/pygeofilter/issues/34) from geopython/cql2_json ([5d439b2](https://www.github.com/geopython/pygeofilter/commit/5d439b277e12b883f3132d4972d2979a8aefd92e)) pygeofilter-0.3.1/CONTRIBUTING.md000066400000000000000000000125111473475122500163070ustar00rootroot00000000000000# Contributing to pygeofilter We welcome contributions to pygeofilter, in the form of issues, bug fixes, documentation or suggestions for enhancements. 
This document sets out our guidelines and best practices for such contributions. It's based on the [Contributing to pygeoapi](https://github.com/geopython/pygeoapi/blob/master/CONTRIBUTING.md) guide which is based on the [Contributing to Open Source Projects Guide](https://contribution-guide-org.readthedocs.io/). pygeofilter has the following modes of contribution: - GitHub Commit Access - GitHub Pull Requests ## Code of Conduct Contributors to this project are expected to act respectfully toward others in accordance with the [OSGeo Code of Conduct](https://www.osgeo.org/code_of_conduct). ## Submitting Bugs ### Due Diligence Before submitting a bug, please do the following: * Perform __basic troubleshooting__ steps: * __Make sure you're on the latest version.__ If you're not on the most recent version, your problem may have been solved already! Upgrading is always the best first step. * [__Search the issue tracker__](https://github.com/geopython/pygeofilter/issues) to make sure it's not a known issue. ### What to put in your bug report Make sure your report gets the attention it deserves: bug reports with missing information may be ignored or punted back to you, delaying a fix. The below constitutes a bare minimum; more info is almost always better: * __What version of Python are you using?__ For example, are you using Python 3.8+, PyPy 2.0? * __What operating system are you using?__ Windows (7, 8, 10, 32-bit, 64-bit), Mac OS X, (10.7.4, 10.9.0), GNU/Linux (which distribution, which version?) Again, more detail is better. * __Which version or versions of the software are you using?__ Ideally, you've followed the advice above and are on the latest version, but please confirm this. * __How can the we recreate your problem?__ Imagine that we have never used pygeofilter before and have downloaded it for the first time. Exactly what steps do we need to take to reproduce your problem? 
## Contributions and Licensing ### Contributor License Agreement Your contribution will be under our [license](https://github.com/geopython/pygeofilter/blob/main/LICENSE) as per [GitHub's terms of service](https://help.github.com/articles/github-terms-of-service/#6-contributions-under-repository-license). ### GitHub Commit Access * Proposals to provide developers with GitHub commit access shall be raised on the pygeofilter [discussions page](https://github.com/geopython/pygeofilter/discussions). Committers shall be added by the project admin. * Removal of commit access shall be handled in the same manner. ### GitHub Pull Requests * Pull requests may include copyright in the source code header by the contributor if the contribution is significant or the contributor wants to claim copyright on their contribution. * All contributors shall be listed at https://github.com/geopython/pygeofilter/graphs/contributors * Unclaimed copyright, by default, is assigned to the main copyright holders as specified in https://github.com/geopython/pygeofilter/blob/main/LICENSE ### Version Control Branching * Always __make a new branch__ for your work, no matter how small. This makes it easy for others to take just that one set of changes from your repository, in case you have multiple unrelated changes floating around. * __Don't submit unrelated changes in the same branch/pull request!__ If it is not possible to review your changes quickly and easily, we may reject your request. * __Base your new branch off of the appropriate branch__ on the main repository: * In general the released version of pygeofilter is based on the ``main`` (default) branch whereas development work is done under other non-default branches. Unless you are sure that your issue affects a non-default branch, __base your branch off the ``main`` one__. * Note that depending on how long it takes for the dev team to merge your patch, the copy of ``main`` you worked off of may get out of date! 
* If you find yourself 'bumping' a pull request that's been sidelined for a while, __make sure you rebase or merge to latest ``main``__ to ensure a speedier resolution. ### Documentation * documentation is managed in `docs/`, in reStructuredText format * [Sphinx](https://www.sphinx-doc.org) is used to generate the documentation * See the [reStructuredText Primer](https://www.sphinx-doc.org/en/master/usage/restructuredtext/basics.html) on rST markup and syntax ### Code Formatting * __Please follow the coding conventions and style used in the pygeofilter repository.__ * pygeofilter follows the [PEP-8](http://www.python.org/dev/peps/pep-0008/) guidelines * 80 characters * spaces, not tabs * pygeofilter, instead of PyGeoFilter, pygeoFilter, etc. #### **pre-commit** The project is using [`pre-commit`](https://pre-commit.com) to automatically run code formatting and type checking on new commits. Please install `pre-commit` and enable it on your environment before pushing new commits. ```bash # Install pre-commit pip install pre-commit # Enable pre-commit cd /pygeofilter pre-commit install # Optional - run pre-commit manually pre-commit run --all-files ``` ## Suggesting Enhancements We welcome suggestions for enhancements, but reserve the right to reject them if they do not follow future plans for pygeofilter. pygeofilter-0.3.1/Dockerfile-3.9000066400000000000000000000012021473475122500163520ustar00rootroot00000000000000FROM python:3.9-buster LABEL description="Test executor" ENV DEBIAN_FRONTEND noninteractive RUN apt-get update --fix-missing \ && apt-get install -y --no-install-recommends \ binutils \ libproj-dev \ gdal-bin \ libsqlite3-mod-spatialite \ spatialite-bin \ && rm -rf /var/lib/apt/lists/* RUN mkdir /app WORKDIR /app COPY requirements-test.txt . COPY requirements-dev.txt . RUN pip install -r requirements-test.txt RUN pip install -r requirements-dev.txt COPY pygeofilter pygeofilter COPY tests tests COPY README.md . COPY setup.py . RUN pip install -e . 
RUN chmod +x tests/execute-tests.sh CMD ["tests/execute-tests.sh"] pygeofilter-0.3.1/LICENSE000066400000000000000000000020521473475122500150620ustar00rootroot00000000000000MIT License Copyright (c) 2021 geopython Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
pygeofilter-0.3.1/MANIFEST.in000066400000000000000000000001721473475122500156140ustar00rootroot00000000000000recursive-include pygeofilter *.py *.lark global-include *.lark include README.md include LICENSE include requirements.txtpygeofilter-0.3.1/README.md000066400000000000000000000310371473475122500153410ustar00rootroot00000000000000# pygeofilter pygeofilter is a pure Python parser implementation of OGC filtering standards [![PyPI version](https://badge.fury.io/py/pygeofilter.svg)](https://badge.fury.io/py/pygeofilter) [![Build Status](https://github.com/geopython/pygeofilter/workflows/build%20%E2%9A%99%EF%B8%8F/badge.svg)](https://github.com/geopython/pygeofilter/actions) [![Documentation Status](https://readthedocs.org/projects/pygeofilter/badge/?version=latest)](https://pygeofilter.readthedocs.io/en/latest/?badge=latest) ## Features * Parsing of several filter encoding standards * [CQL as defined in CSW 2.0](https://portal.ogc.org/files/?artifact_id=20555) * [CQL JSON as defined in OGC API - Features - Part 3: Filtering and the Common Query Language (CQL)](https://portal.ogc.org/files/96288#cql-json-schema) * [JSON Filter Expressions (JFE)](https://github.com/tschaub/ogcapi-features/tree/json-array-expression/extensions/cql/jfe) * Soon: * [CQL Text as defined in OGC API - Features - Part 3: Filtering and the Common Query Language (CQL)](https://portal.ogc.org/files/96288#cql-bnf) * [FES](http://docs.opengeospatial.org/is/09-026r2/09-026r2.html) * Several backends included * [Django](https://www.djangoproject.com/) * [SQLAlchemy](https://www.sqlalchemy.org/) * [(Geo)Pandas](https://pandas.pydata.org/) * Native Python objects ## Installation The package can be installed via PIP: ```bash pip install pygeofilter ``` Some features require additional dependencies. This currently only affects the backends. 
To install these, the features have to be listed: ```bash # for the Django backend pip install pygeofilter[backend-django] # for the sqlalchemy backend pip install pygeofilter[backend-sqlalchemy] # for the native backend pip install pygeofilter[backend-native] ``` ## Usage pygeofilter can be used on various levels. It provides parsers for various filtering languages, such as ECQL or CQL-JSON. Each parser lives in its own sub-package: ```python >>> from pygeofilter.parsers.ecql import parse as parse_ecql >>> filters = parse_ecql(filter_expression) >>> from pygeofilter.parsers.cql_json import parse as parse_json >>> filters = parse_json(filter_expression) ``` Each parser creates an abstract syntax tree (AST) representation of that filter expression and thus unifies all possible languages to a single common denominator. All possible nodes are defined as classes in the `pygeofilter.ast` module. ### Inspection The easiest way to inspect the resulting AST is to use the `get_repr` function, which returns a nice string representation of what was parsed: ```python >>> filters = pygeofilter.parsers.ecql.parse('id = 10') >>> print(pygeofilter.get_repr(ast)) ATTRIBUTE id = LITERAL 10.0 >>> >>> >>> filter_expr = '(number BETWEEN 5 AND 10 AND string NOT LIKE \'%B\') OR INTERSECTS(geometry, LINESTRING(0 0, 1 1))' >>> print(pygeofilter.ast.get_repr(pygeofilter.parse(filter_expr))) ( ( ATTRIBUTE number BETWEEN 5 AND 10 ) AND ( ATTRIBUTE string NOT LIKE '%B' ) ) OR ( INTERSECTS(ATTRIBUTE geometry, Geometry(geometry={'type': 'LineString', 'coordinates': ((0.0, 0.0), (1.0, 1.0))})) ) ``` ### Evaluation A parsed AST can then be evaluated and transformed into filtering mechanisms in the required context. Usually this is a language such as SQL or an object-relational mapper (ORM) interfacing a data store of some kind. There are a number of pre-defined backends available, where parsed expressions can be applied. 
For the moment this includes: * Django * sqlalchemy * (Geo)Pandas * Elasticsearch * OpenSearch * Pure Python object filtering The usage of those are described in their own documentation. pygeofilter provides mechanisms to help building such an evaluator (the included backends use them as well). The `Evaluator` class allows to conveniently walk through an AST depth-first and build the filters for the API in question. Only handled node classes are evaluated, unsupported ones will raise an exception. Consider this example: ```python from pygeofilter import ast from pygeofilter.backends.evaluator import Evaluator, handle from myapi import filters # <- this is where the filters are created. # of course, this can also be done in the # evaluator itself # Evaluators must derive from the base class `Evaluator` to work class MyAPIEvaluator(Evaluator): # you can use any constructor as you need def __init__(self, field_mapping=None, mapping_choices=None): self.field_mapping = field_mapping self.mapping_choices = mapping_choices # specify the handled classes in the `handle` decorator to mark # this function as the handler for that node class(es) @handle(ast.Not) def not_(self, node, sub): return filters.negate(sub) # multiple classes can be declared for the same handler function @handle(ast.And, ast.Or) def combination(self, node, lhs, rhs): return filters.combine((lhs, rhs), node.op.value) # handle all sub-classes, like ast.Equal, ast.NotEqual, # ast.LessThan, ast.GreaterThan, ... 
@handle(ast.Comparison, subclasses=True) def comparison(self, node, lhs, rhs): return filters.compare( lhs, rhs, node.op.value, self.mapping_choices ) @handle(ast.Between) def between(self, node, lhs, low, high): return filters.between( lhs, low, high, node.not_ ) @handle(ast.Like) def like(self, node, lhs): return filters.like( lhs, node.pattern, node.nocase, node.not_, self.mapping_choices ) @handle(ast.In) def in_(self, node, lhs, *options): return filters.contains( lhs, options, node.not_, self.mapping_choices ) def adopt(self, node, *sub_args): # a "catch-all" function for node classes that are not # handled elsewhere. Use with caution and raise exceptions # yourself when a node class is not supported. ... # ...further ast handlings removed for brevity ``` ## Testing For testing, several requirements must be satisfied. These can be installed, via pip: ```bash pip install -r requirements-dev.txt pip install -r requirements-test.txt ``` The functionality can be tested using `pytest`. ```bash python -m pytest ``` ### Docker To execute tests in Docker: ``` docker build -t pygeofilter/test -f Dockerfile-3.9 . docker run --rm pygeofilter/test ``` ## Backends The following backends are shipped with `pygeofilter`. Some require additional dependencies, refer to the [installation](#installation) section for further details. ### Django For Django there is a default backend implementation, where all the filters are translated to the Django ORM. In order to use this integration, we need two dictionaries, one mapping the available fields to the Django model fields, and one to map the fields that use `choices`. 
Consider the following example models: ```python from django.contrib.gis.db import models optional = dict(null=True, blank=True) class Record(models.Model): identifier = models.CharField(max_length=256, unique=True, null=False) geometry = models.GeometryField() float_attribute = models.FloatField(**optional) int_attribute = models.IntegerField(**optional) str_attribute = models.CharField(max_length=256, **optional) datetime_attribute = models.DateTimeField(**optional) choice_attribute = models.PositiveSmallIntegerField(choices=[ (1, 'ASCENDING'), (2, 'DESCENDING'),], **optional) class RecordMeta(models.Model): record = models.ForeignKey(Record, on_delete=models.CASCADE, related_name='record_metas') float_meta_attribute = models.FloatField(**optional) int_meta_attribute = models.IntegerField(**optional) str_meta_attribute = models.CharField(max_length=256, **optional) datetime_meta_attribute = models.DateTimeField(**optional) choice_meta_attribute = models.PositiveSmallIntegerField(choices=[ (1, 'X'), (2, 'Y'), (3, 'Z')], **optional) ``` Now we can specify the field mappings and mapping choices to be used when applying the filters: ```python FIELD_MAPPING = { 'identifier': 'identifier', 'geometry': 'geometry', 'floatAttribute': 'float_attribute', 'intAttribute': 'int_attribute', 'strAttribute': 'str_attribute', 'datetimeAttribute': 'datetime_attribute', 'choiceAttribute': 'choice_attribute', # meta fields 'floatMetaAttribute': 'record_metas__float_meta_attribute', 'intMetaAttribute': 'record_metas__int_meta_attribute', 'strMetaAttribute': 'record_metas__str_meta_attribute', 'datetimeMetaAttribute': 'record_metas__datetime_meta_attribute', 'choiceMetaAttribute': 'record_metas__choice_meta_attribute', } MAPPING_CHOICES = { 'choiceAttribute': dict(Record._meta.get_field('choice_attribute').choices), 'choiceMetaAttribute': dict(RecordMeta._meta.get_field('choice_meta_attribute').choices), } ``` Finally we are able to connect the CQL AST to the Django database models. 
We also provide factory functions to parse the timestamps, durations, geometries and envelopes, so that they can be used with the ORM layer: ```python from pygeofilter.backends.django import to_filter from pygeofilter.parsers.ecql import parse cql_expr = 'strMetaAttribute LIKE \'%parent%\' AND datetimeAttribute BEFORE 2000-01-01T00:00:01Z' ast = parse(cql_expr) filters = to_filter(ast, mapping, mapping_choices) qs = Record.objects.filter(**filters) ``` ### SQL `pygeofilter` provides a rudimentary way to create an SQL `WHERE` clause from an AST. The following example shows this usage in conjunction with the OGR `ExecuteSQL` function: ```python from osgeo import ogr from pygeofilter.backends.sql import to_sql_where from pygeofilter.parsers.ecql import parse FIELD_MAPPING = { 'str_attr': 'str_attr', 'maybe_str_attr': 'maybe_str_attr', 'int_attr': 'int_attr', 'float_attr': 'float_attr', 'date_attr': 'date_attr', 'datetime_attr': 'datetime_attr', 'point_attr': 'GEOMETRY', } FUNCTION_MAP = { 'sin': 'sin' } # parse the expression ast = parse('int_attr > 6') # open an OGR DataSource data = ogr.Open(...) # create the WHERE clause, field and function mappings must be provided where = to_sql_where(ast, FIELD_MAPPING, FUNCTION_MAP) # filter the DataSource to get a result Layer layer = data.ExecuteSQL(f""" SELECT id, str_attr, maybe_str_attr, int_attr, float_attr, date_attr, datetime_attr, GEOMETRY FROM layer WHERE {where} """, None, "SQLite") ``` Note that it is vital to specify the `SQLite` dialect as this is the one used internally. :warning: Input values are *not* sanitized/separated from the generated SQL text. This is due to the compatibility with the OGR API not allowing to separate the SQL from the arguments. ### Optimization This is a special kind of backend, as the result of the AST evaluation is actually a new AST. The purpose of this backend is to eliminate static branches of the AST, potentially reducing the cost of an actual evaluation for filtering values. 
What parts of an AST can be optimized: - Arithmetic operations of purely static operands - All predicates (spatial, temporal, array, `like`, `between`, `in`) if all of the operands are already static - Functions, when passed in a special lookup table and all arguments are static - `And` and `Or` combinators can be eliminated if either branch can be predicted What cannot be optimized are branches that contain references to attributes or functions not passed in the dictionary. The following example shows how a static computation can be optimized to a static value, replacing the whole branch of the AST: ```python >>> import math >>> from pygeofilter import ast >>> from pygeofilter.parsers.ecql import parse >>> from pygeofilter.backends.optimize import optimize >>> >>> root = parse('attr < sin(3.7) - 5') >>> optimized_root = optimize(root, {'sin': math.sin}) >>> print(ast.get_repr(root)) ATTRIBUTE attr < ( ( sin (3.7) ) - 5 ) >>> print(ast.get_repr(optimized_root)) ATTRIBUTE attr < -5.529836140908493 ``` pygeofilter-0.3.1/SECURITY.md000066400000000000000000000010651473475122500156510ustar00rootroot00000000000000# pygeofilter Security Policy ## Reporting Security/vulnerability reports **should not** be submitted through GitHub issues or public discussions, but instead please send your report to **geopython-security nospam @ lists.osgeo.org** - (remove the blanks and 'nospam'). ## Supported Versions The pygeofilter developer team will release patches for security vulnerabilities for the following versions: | Version | Supported | | ------- | ------------------ | | latest stable version | :white_check_mark: | | previous versions | :x: | pygeofilter-0.3.1/debian/000077500000000000000000000000001473475122500153005ustar00rootroot00000000000000pygeofilter-0.3.1/debian/changelog000066400000000000000000000002251473475122500171510ustar00rootroot00000000000000pygeofilter (0.0.3-0~focal0) focal; urgency=low * Initial packaging. 
-- Angelos Tzotsos Tue, 12 Oct 2021 13:00:00 +0300pygeofilter-0.3.1/debian/compat000066400000000000000000000000021473475122500164760ustar00rootroot000000000000009 pygeofilter-0.3.1/debian/control000066400000000000000000000013271473475122500167060ustar00rootroot00000000000000Source: pygeofilter Maintainer: Fabian Schindler Uploaders: Angelos Tzotsos Section: python Priority: optional Build-Depends: debhelper (>= 9), python3-setuptools, dh-python, dpkg-dev (>= 1.16), autoconf, python3-all, python3-all-dev Standards-Version: 3.9.3 Homepage: https://github.com/geopython/pygeofilter Package: python3-pygeofilter Architecture: any Section: web Depends: ${shlibs:Depends}, ${misc:Depends}, python3 Description: This package contains the pygeofilter library . pygeofilter is a pure Python parser implementation of OGC filtering standards. pygeofilter-0.3.1/debian/copyright000066400000000000000000000023671473475122500172430ustar00rootroot00000000000000This package was debianized by Angelos Tzotsos on Tue, 12 Oct 2021 13:00:00 +0300. It was downloaded from: https://github.com/geopython/pygeofilter Copyright: Copyright (c) 2021 geopython License: Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies of this Software or works derived from this Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. pygeofilter-0.3.1/debian/rules000077500000000000000000000004061473475122500163600ustar00rootroot00000000000000#!/usr/bin/make -f # -*- makefile -*- # Uncomment this to turn on verbose mode. #export DH_VERBOSE=1 export PYBUILD_NAME=pygeofilter %: dh $@ --with python3 --buildsystem pybuild override_dh_auto_test: @echo "nocheck set, not running tests" pygeofilter-0.3.1/debian/source/000077500000000000000000000000001473475122500166005ustar00rootroot00000000000000pygeofilter-0.3.1/debian/source/format000066400000000000000000000000141473475122500200060ustar00rootroot000000000000003.0 (quilt) pygeofilter-0.3.1/docs/000077500000000000000000000000001473475122500150065ustar00rootroot00000000000000pygeofilter-0.3.1/docs/.gitignore000066400000000000000000000000041473475122500167700ustar00rootroot00000000000000api pygeofilter-0.3.1/docs/Makefile000066400000000000000000000011041473475122500164420ustar00rootroot00000000000000# Minimal makefile for Sphinx documentation # # You can set these variables from the command line. SPHINXOPTS = SPHINXBUILD = sphinx-build SOURCEDIR = . BUILDDIR = _build # Put it first so that "make" without argument is like "make help". help: @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) .PHONY: help Makefile # Catch-all target: route all unknown targets to Sphinx using the new # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). %: Makefile @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)pygeofilter-0.3.1/docs/conf.py000066400000000000000000000132451473475122500163120ustar00rootroot00000000000000# -*- coding: utf-8 -*- # # Configuration file for the Sphinx documentation builder. 
# # This file does only contain a selection of the most common options. For a # full list see the documentation: # https://www.sphinx-doc.org/en/master/usage/configuration.html # -- Path setup -------------------------------------------------------------- # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. # import os import sys sys.path.insert(0, os.path.abspath("..")) # -- Project information ----------------------------------------------------- project = "pygeofilter" copyright = "2021, Fabian Schindler" author = "Fabian Schindler" # The short X.Y version version = "" # The full version, including alpha/beta/rc tags release = "0.0.3" # -- General configuration --------------------------------------------------- # If your documentation needs a minimal Sphinx version, state it here. # # needs_sphinx = '1.0' # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. extensions = [ "sphinx.ext.autodoc", "sphinx.ext.intersphinx", "sphinxcontrib.apidoc", "m2r2", ] # Add any paths that contain templates here, relative to this directory. templates_path = ["_templates"] # The suffix(es) of source filenames. # You can specify multiple suffix as a list of string: # # source_suffix = ['.rst', '.md'] source_suffix = ".rst" # The master toctree document. master_doc = "index" # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. # # This is also used if you do content translation via gettext catalogs. # Usually you set "language" from the command line for these cases. language = None # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. 
# This pattern also affects html_static_path and html_extra_path. exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"] # The name of the Pygments (syntax highlighting) style to use. pygments_style = None # -- Options for HTML output ------------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. # html_theme = "alabaster" # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. # # html_theme_options = {} # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". html_static_path = ["_static"] # Custom sidebar templates, must be a dictionary that maps document names # to template names. # # The default sidebars (for documents that don't match any pattern) are # defined by theme itself. Builtin themes are using these templates by # default: ``['localtoc.html', 'relations.html', 'sourcelink.html', # 'searchbox.html']``. # # html_sidebars = {} # -- Options for HTMLHelp output --------------------------------------------- # Output file base name for HTML help builder. htmlhelp_basename = "pygeofilterdoc" # -- Options for LaTeX output ------------------------------------------------ latex_elements = { # The paper size ('letterpaper' or 'a4paper'). # # 'papersize': 'letterpaper', # The font size ('10pt', '11pt' or '12pt'). # # 'pointsize': '10pt', # Additional stuff for the LaTeX preamble. # # 'preamble': '', # Latex figure (float) alignment # # 'figure_align': 'htbp', } # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, # author, documentclass [howto, manual, or own class]). 
latex_documents = [ ( master_doc, "pygeofilter.tex", "pygeofilter Documentation", "Fabian Schindler", "manual", ), ] # -- Options for manual page output ------------------------------------------ # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). man_pages = [(master_doc, "pygeofilter", "pygeofilter Documentation", [author], 1)] # -- Options for Texinfo output ---------------------------------------------- # Grouping the document tree into Texinfo files. List of tuples # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ ( master_doc, "pygeofilter", "pygeofilter Documentation", author, "pygeofilter", "One line description of project.", "Miscellaneous", ), ] # -- Options for Epub output ------------------------------------------------- # Bibliographic Dublin Core info. epub_title = project # The unique identifier of the text. This can be a ISBN number # or the project homepage. # # epub_identifier = '' # A unique identification for the text. # # epub_uid = '' # A list of files that should not be packed into the epub file. epub_exclude_files = ["search.html"] # -- Extension configuration ------------------------------------------------- intersphinx_mapping = { "python": ("https://python.readthedocs.org/en/latest/", None), "django": ("https://django.readthedocs.org/en/latest/", None), } # apidoc configs: apidoc_module_dir = "../pygeofilter" apidoc_output_dir = "api" # apidoc_excluded_paths = ['tests'] # apidoc_separate_modules = True # apidoc_module_first = True pygeofilter-0.3.1/docs/contributing.rst000066400000000000000000000000411473475122500202420ustar00rootroot00000000000000.. mdinclude:: ../CONTRIBUTING.mdpygeofilter-0.3.1/docs/index.rst000066400000000000000000000003301473475122500166430ustar00rootroot00000000000000.. mdinclude:: ../README.md .. 
toctree:: :maxdepth: 2 :caption: Contents: license contributing api/modules Indices and tables ================== * :ref:`genindex` * :ref:`modindex` * :ref:`search` pygeofilter-0.3.1/docs/license.rst000066400000000000000000000000651473475122500171630ustar00rootroot00000000000000License ======= .. include:: ../LICENSE :literal:pygeofilter-0.3.1/docs/make.bat000066400000000000000000000014271473475122500164170ustar00rootroot00000000000000@ECHO OFF pushd %~dp0 REM Command file for Sphinx documentation if "%SPHINXBUILD%" == "" ( set SPHINXBUILD=sphinx-build ) set SOURCEDIR=. set BUILDDIR=_build if "%1" == "" goto help %SPHINXBUILD% >NUL 2>NUL if errorlevel 9009 ( echo. echo.The 'sphinx-build' command was not found. Make sure you have Sphinx echo.installed, then set the SPHINXBUILD environment variable to point echo.to the full path of the 'sphinx-build' executable. Alternatively you echo.may add the Sphinx directory to PATH. echo. echo.If you don't have Sphinx installed, grab it from echo.https://www.sphinx-doc.org exit /b 1 ) %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% goto end :help %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% :end popd pygeofilter-0.3.1/docs/requirements.txt000066400000000000000000000000321473475122500202650ustar00rootroot00000000000000sphinxcontrib-apidoc m2r2 pygeofilter-0.3.1/pygeofilter/000077500000000000000000000000001473475122500164075ustar00rootroot00000000000000pygeofilter-0.3.1/pygeofilter/__init__.py000066400000000000000000000030051473475122500205160ustar00rootroot00000000000000# ------------------------------------------------------------------------------ # # Project: pygeofilter # Authors: Fabian Schindler # # ------------------------------------------------------------------------------ # Copyright (C) 2019 EOX IT Services GmbH # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without 
restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies of this Software or works derived from this Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. # ------------------------------------------------------------------------------ from .version import __version__ __all__ = ["__version__"] pygeofilter-0.3.1/pygeofilter/ast.py000066400000000000000000000430631473475122500175560ustar00rootroot00000000000000# ------------------------------------------------------------------------------ # # Project: pygeofilter # Authors: Fabian Schindler # # ------------------------------------------------------------------------------ # Copyright (C) 2019 EOX IT Services GmbH # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies of this Software or works derived from this Software. 
# # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. # ------------------------------------------------------------------------------ from dataclasses import dataclass from enum import Enum from typing import ClassVar, List, Optional, Union from . import values AstType = Union["Node", values.ValueType, list] ScalarAstType = Union["Node", int, float] SpatialAstType = Union["Node", values.SpatialValueType] TemporalAstType = Union["Node", values.TemporalValueType] ArrayAstType = Union["Node", List[AstType]] class Node: """The base class for all other nodes to display the AST of CQL.""" inline: bool = False def get_sub_nodes(self) -> List[AstType]: """Get a list of sub-node of this node. :return: a list of all sub-nodes :rtype: list[Node] """ return [] def get_template(self) -> str: """Get a template string (using the ``.format`` method) to represent the current node and sub-nodes. The template string must provide a template replacement for each sub-node reported by :func:`~pygeofilter.ast.Node.get_sub_nodes`. 
:return: the template to render """ raise NotImplementedError def __eq__(self, other) -> bool: if not isinstance(other, self.__class__): return False self_dict = { k: v.__geo_interface__ if hasattr(v, "__geo_interface__") else v for k, v in self.__dict__.items() } other_dict = { k: v.__geo_interface__ if hasattr(v, "__geo_interface__") else v for k, v in other.__dict__.items() } return self_dict == other_dict class Condition(Node): """The base class for all nodes representing a condition""" pass class Not(Condition): """ Node class to represent a negation condition. :ivar sub_node: the condition node to be negated :type sub_node: Node """ def __init__(self, sub_node: Node): self.sub_node = sub_node def get_sub_nodes(self) -> List[AstType]: """Returns the sub-node for the negated condition.""" return [self.sub_node] def get_template(self) -> str: return "NOT {}" class CombinationOp(Enum): AND = "AND" OR = "OR" @dataclass class Combination(Condition): """Node class to represent a condition to combine two other conditions using either AND or OR. """ lhs: Node rhs: Node op: ClassVar[CombinationOp] def get_sub_nodes(self) -> List[AstType]: return [self.lhs, self.rhs] def get_template(self) -> str: return f"{{}} {self.op.name} {{}}" @classmethod def from_items(cls, first, *rest) -> Node: result = first for item in rest: result = cls(result, item) return result @dataclass class And(Combination): op: ClassVar[CombinationOp] = CombinationOp.AND @dataclass class Or(Combination): op: ClassVar[CombinationOp] = CombinationOp.OR class Predicate(Node): """The base class for all nodes representing a predicate""" pass class ComparisonOp(Enum): EQ = "=" NE = "<>" LT = "<" LE = "<=" GT = ">" GE = ">=" @dataclass class Comparison(Predicate): """Node class to represent a comparison predicate: to compare two expressions using a comparison operation. 
""" lhs: ScalarAstType rhs: ScalarAstType op: ClassVar[ComparisonOp] def get_sub_nodes(self) -> List[AstType]: return [self.lhs, self.rhs] def get_template(self) -> str: return f"{{}} {self.op.value} {{}}" @dataclass class Equal(Comparison): op: ClassVar[ComparisonOp] = ComparisonOp.EQ @dataclass class NotEqual(Comparison): op: ClassVar[ComparisonOp] = ComparisonOp.NE @dataclass class LessThan(Comparison): op: ClassVar[ComparisonOp] = ComparisonOp.LT @dataclass class LessEqual(Comparison): op: ClassVar[ComparisonOp] = ComparisonOp.LE @dataclass class GreaterThan(Comparison): op: ClassVar[ComparisonOp] = ComparisonOp.GT @dataclass class GreaterEqual(Comparison): op: ClassVar[ComparisonOp] = ComparisonOp.GE @dataclass class Between(Predicate): """Node class to represent a BETWEEN predicate: to check whether an expression value within a range. """ lhs: Node low: ScalarAstType high: ScalarAstType not_: bool def get_sub_nodes(self) -> List[AstType]: return [self.lhs, self.low, self.high] def get_template(self) -> str: return f"{{}} {'NOT ' if self.not_ else ''}BETWEEN {{}} AND {{}}" @dataclass class Like(Predicate): """Node class to represent a wildcard sting matching predicate.""" lhs: Node pattern: str nocase: bool wildcard: str singlechar: str escapechar: str not_: bool def get_sub_nodes(self) -> List[AstType]: return [self.lhs] def get_template(self) -> str: return ( f"{{}} {'NOT ' if self.not_ else ''}" f"{'I' if self.nocase else ''}LIKE '{self.pattern}'" # TODO wildcard, singlechar, escapechar ) @dataclass class In(Predicate): """Node class to represent list checking predicate.""" lhs: AstType sub_nodes: List[AstType] not_: bool def get_sub_nodes(self) -> List[AstType]: return [self.lhs] + list(self.sub_nodes) def get_template(self) -> str: return ( f"{{}} {'NOT ' if self.not_ else ''}IN " f"{', '.join(['{}'] * len(self.sub_nodes))}" ) @dataclass class IsNull(Predicate): """Node class to represent null check predicate.""" lhs: AstType not_: bool def 
get_sub_nodes(self) -> List[AstType]: return [self.lhs] def get_template(self) -> str: return f"{{}} IS {('NOT ' if self.not_ else '')}NULL" @dataclass class Exists(Predicate): lhs: AstType not_: bool def get_sub_nodes(self) -> List[AstType]: return [self.lhs] def get_template(self) -> str: return f"{{}} {('DOES-NOT-EXIST' if self.not_ else 'EXISTS')}" @dataclass class Include(Predicate): not_: bool def get_template(self) -> str: return "EXCLUDE" if self.not_ else "INCLUDE" # https://portal.ogc.org/files/96288#enhanced-temporal-operators # BEFORE <======> <-----> AFTER # MEETS <----------> METBY # TOVERLAPS <--------------> OVERLAPPEDBY # BEGINS <------------------> BEGUNBY # DURING <----------------------> TCONTAINS # TENDS <----------> ENDEDBY # TEQUALS <------> TEQUALS # DISJOINT: If a proper interval T1 is intervalDisjoint another proper # interval T2,then the beginning of T1 is after the end of T2, or the end of # T1 is before the beginning of T2, i.e. the intervals do not overlap in any # way, but their ordering relationship is not known. 
# https://github.com/geotools/geotools/blob/main/modules/library/cql/ECQL.md#temporal-predicate # BEFORE_OR_DURING <-----> # DURING_OR_AFTER <-----> class TemporalComparisonOp(Enum): DISJOINT = "DISJOINT" AFTER = "AFTER" BEFORE = "BEFORE" BEGINS = "BEGINS" BEGUNBY = "BEGUNBY" TCONTAINS = "TCONTAINS" DURING = "DURING" ENDEDBY = "ENDEDBY" ENDS = "ENDS" TEQUALS = "TEQUALS" MEETS = "MEETS" METBY = "METBY" TOVERLAPS = "TOVERLAPS" OVERLAPPEDBY = "OVERLAPPEDBY" BEFORE_OR_DURING = "BEFORE OR DURING" DURING_OR_AFTER = "DURING OR AFTER" @dataclass class TemporalPredicate(Predicate): """Node class to represent temporal predicate.""" lhs: TemporalAstType rhs: TemporalAstType op: ClassVar[TemporalComparisonOp] def get_sub_nodes(self) -> List[AstType]: return [self.lhs, self.rhs] def get_template(self) -> str: return f"{{}} {self.op} {{}}" @dataclass class TimeDisjoint(TemporalPredicate): op: ClassVar[TemporalComparisonOp] = TemporalComparisonOp.DISJOINT @dataclass class TimeAfter(TemporalPredicate): op: ClassVar[TemporalComparisonOp] = TemporalComparisonOp.AFTER @dataclass class TimeBefore(TemporalPredicate): op: ClassVar[TemporalComparisonOp] = TemporalComparisonOp.BEFORE @dataclass class TimeBegins(TemporalPredicate): op: ClassVar[TemporalComparisonOp] = TemporalComparisonOp.BEGINS @dataclass class TimeBegunBy(TemporalPredicate): op: ClassVar[TemporalComparisonOp] = TemporalComparisonOp.BEGUNBY @dataclass class TimeContains(TemporalPredicate): op: ClassVar[TemporalComparisonOp] = TemporalComparisonOp.TCONTAINS @dataclass class TimeDuring(TemporalPredicate): op: ClassVar[TemporalComparisonOp] = TemporalComparisonOp.DURING @dataclass class TimeEndedBy(TemporalPredicate): op: ClassVar[TemporalComparisonOp] = TemporalComparisonOp.ENDEDBY @dataclass class TimeEnds(TemporalPredicate): op: ClassVar[TemporalComparisonOp] = TemporalComparisonOp.ENDS @dataclass class TimeEquals(TemporalPredicate): op: ClassVar[TemporalComparisonOp] = TemporalComparisonOp.TEQUALS @dataclass class 
TimeMeets(TemporalPredicate): op: ClassVar[TemporalComparisonOp] = TemporalComparisonOp.MEETS @dataclass class TimeMetBy(TemporalPredicate): op: ClassVar[TemporalComparisonOp] = TemporalComparisonOp.METBY @dataclass class TimeOverlaps(TemporalPredicate): op: ClassVar[TemporalComparisonOp] = TemporalComparisonOp.TOVERLAPS @dataclass class TimeOverlappedBy(TemporalPredicate): op: ClassVar[TemporalComparisonOp] = TemporalComparisonOp.OVERLAPPEDBY @dataclass class TimeBeforeOrDuring(TemporalPredicate): op: ClassVar[TemporalComparisonOp] = TemporalComparisonOp.BEFORE_OR_DURING @dataclass class TimeDuringOrAfter(TemporalPredicate): op: ClassVar[TemporalComparisonOp] = TemporalComparisonOp.DURING_OR_AFTER class ArrayComparisonOp(Enum): AEQUALS = "AEQUALS" ACONTAINS = "ACONTAINS" ACONTAINEDBY = "ACONTAINEDBY" AOVERLAPS = "AOVERLAPS" @dataclass class ArrayPredicate(Predicate): """Node class to represent array predicates.""" lhs: ArrayAstType rhs: ArrayAstType op: ClassVar[ArrayComparisonOp] def get_sub_nodes(self) -> List[AstType]: return [self.lhs, self.rhs] def get_template(self) -> str: return f"{{}} {self.op} {{}}" @dataclass class ArrayEquals(ArrayPredicate): op: ClassVar[ArrayComparisonOp] = ArrayComparisonOp.AEQUALS @dataclass class ArrayContains(ArrayPredicate): op: ClassVar[ArrayComparisonOp] = ArrayComparisonOp.ACONTAINS @dataclass class ArrayContainedBy(ArrayPredicate): op: ClassVar[ArrayComparisonOp] = ArrayComparisonOp.ACONTAINEDBY @dataclass class ArrayOverlaps(ArrayPredicate): op: ClassVar[ArrayComparisonOp] = ArrayComparisonOp.AOVERLAPS class SpatialComparisonOp(Enum): INTERSECTS = "INTERSECTS" DISJOINT = "DISJOINT" CONTAINS = "CONTAINS" WITHIN = "WITHIN" TOUCHES = "TOUCHES" CROSSES = "CROSSES" OVERLAPS = "OVERLAPS" EQUALS = "EQUALS" @dataclass class SpatialComparisonPredicate(Predicate): """Node class to represent spatial relation predicate.""" lhs: SpatialAstType rhs: SpatialAstType op: ClassVar[SpatialComparisonOp] def get_sub_nodes(self) -> 
List[AstType]: return [self.lhs, self.rhs] def get_template(self) -> str: return f"{self.op.name}({{}}, {{}})" @dataclass class GeometryIntersects(SpatialComparisonPredicate): op: ClassVar[SpatialComparisonOp] = SpatialComparisonOp.INTERSECTS @dataclass class GeometryDisjoint(SpatialComparisonPredicate): op: ClassVar[SpatialComparisonOp] = SpatialComparisonOp.DISJOINT @dataclass class GeometryContains(SpatialComparisonPredicate): op: ClassVar[SpatialComparisonOp] = SpatialComparisonOp.CONTAINS @dataclass class GeometryWithin(SpatialComparisonPredicate): op: ClassVar[SpatialComparisonOp] = SpatialComparisonOp.WITHIN @dataclass class GeometryTouches(SpatialComparisonPredicate): op: ClassVar[SpatialComparisonOp] = SpatialComparisonOp.TOUCHES @dataclass class GeometryCrosses(SpatialComparisonPredicate): op: ClassVar[SpatialComparisonOp] = SpatialComparisonOp.CROSSES @dataclass class GeometryOverlaps(SpatialComparisonPredicate): op: ClassVar[SpatialComparisonOp] = SpatialComparisonOp.OVERLAPS @dataclass class GeometryEquals(SpatialComparisonPredicate): op: ClassVar[SpatialComparisonOp] = SpatialComparisonOp.EQUALS @dataclass class Relate(Predicate): """Node class to represent spatial relation predicate.""" lhs: SpatialAstType rhs: SpatialAstType pattern: str def get_sub_nodes(self) -> List[AstType]: return [self.lhs, self.rhs] def get_template(self) -> str: return f"RELATE({{}}, {{}}, '{self.pattern}')" class SpatialDistanceOp(Enum): DWITHIN = "DWITHIN" BEYOND = "BEYOND" @dataclass class SpatialDistancePredicate(Predicate): """Node class to represent spatial relation predicate.""" lhs: SpatialAstType rhs: SpatialAstType distance: float units: str op: ClassVar[SpatialDistanceOp] def get_sub_nodes(self) -> List[AstType]: return [self.lhs, self.rhs] def get_template(self) -> str: return f"{self.op.name}({{}}, {{}}, {self.distance}, '{self.units}')" @dataclass class DistanceWithin(SpatialDistancePredicate): op: ClassVar[SpatialDistanceOp] = SpatialDistanceOp.DWITHIN 
@dataclass class DistanceBeyond(SpatialDistancePredicate): op: ClassVar[SpatialDistanceOp] = SpatialDistanceOp.BEYOND @dataclass class BBox(Predicate): """Node class to represent a bounding box predicate.""" lhs: SpatialAstType minx: float miny: float maxx: float maxy: float crs: Optional[str] = None def get_sub_nodes(self) -> List[AstType]: return [self.lhs] def get_template(self) -> str: return ( f"BBOX({{}}, {self.minx}, {self.miny}, {self.maxx}, " f"{self.maxy}, {repr(self.crs)})" ) class Expression(Node): """The base class for all nodes representing expressions""" pass class Attribute(Expression): """Node class to represent attribute lookup expressions :ivar name: the name of the attribute to be accessed :type name: str """ inline = True def __init__(self, name): self.name = name def __repr__(self): return f"ATTRIBUTE {self.name}" class ArithmeticOp(Enum): ADD = "+" SUB = "-" MUL = "*" DIV = "/" @dataclass class Arithmetic(Expression): """Node class to represent arithmetic operation expressions with two sub-expressions and an operator. 
""" lhs: ScalarAstType rhs: ScalarAstType op: ClassVar[ArithmeticOp] def get_sub_nodes(self) -> List[AstType]: return [self.lhs, self.rhs] def get_template(self) -> str: return f"{{}} {self.op.value} {{}}" @dataclass class Add(Arithmetic): op: ClassVar[ArithmeticOp] = ArithmeticOp.ADD @dataclass class Sub(Arithmetic): op: ClassVar[ArithmeticOp] = ArithmeticOp.SUB @dataclass class Mul(Arithmetic): op: ClassVar[ArithmeticOp] = ArithmeticOp.MUL @dataclass class Div(Arithmetic): op: ClassVar[ArithmeticOp] = ArithmeticOp.DIV @dataclass class Function(Expression): """Node class to represent function invocations.""" name: str arguments: List[AstType] def get_sub_nodes(self) -> List[AstType]: return self.arguments def get_template(self) -> str: return f"{self.name} ({', '.join(['{}'] * len(self.arguments))})" def indent(text: str, amount: int, ch: str = " ") -> str: padding = amount * ch return "".join(padding + line for line in text.splitlines(True)) def get_repr(node: Node, indent_amount: int = 0, indent_incr: int = 4) -> str: """Get a debug representation of the given AST node. ``indent_amount`` and ``indent_incr`` are for the recursive call and don't need to be passed. 
""" sub_nodes = node.get_sub_nodes() template = node.get_template() args = [] for sub_node in sub_nodes: if isinstance(sub_node, Node) and not sub_node.inline: args.append( "(\n{}\n)".format( indent( get_repr(sub_node, indent_amount + indent_incr, indent_incr), indent_amount + indent_incr, ) ) ) else: args.append(repr(sub_node)) return template.format(*args) pygeofilter-0.3.1/pygeofilter/backends/000077500000000000000000000000001473475122500201615ustar00rootroot00000000000000pygeofilter-0.3.1/pygeofilter/backends/__init__.py000066400000000000000000000000001473475122500222600ustar00rootroot00000000000000pygeofilter-0.3.1/pygeofilter/backends/cql2_json/000077500000000000000000000000001473475122500220535ustar00rootroot00000000000000pygeofilter-0.3.1/pygeofilter/backends/cql2_json/__init__.py000066400000000000000000000000651473475122500241650ustar00rootroot00000000000000from .evaluate import to_cql2 __all__ = ["to_cql2"] pygeofilter-0.3.1/pygeofilter/backends/cql2_json/evaluate.py000066400000000000000000000102111473475122500242260ustar00rootroot00000000000000# ------------------------------------------------------------------------------ # # Project: pygeofilter # Authors: Fabian Schindler , # David Bitner # # ------------------------------------------------------------------------------ # Copyright (C) 2021 EOX IT Services GmbH # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies of this Software or works derived from this Software. 
# # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. # ------------------------------------------------------------------------------ import json from datetime import date, datetime from typing import Dict, Optional from ... import ast, values from ...cql2 import get_op from ..evaluator import Evaluator, handle def json_serializer(obj): if isinstance(obj, (datetime, date)): return obj.isoformat() if hasattr(obj, "name"): return obj.name raise TypeError(f"{obj} with type {type(obj)} is not serializable.") class CQL2Evaluator(Evaluator): def __init__( self, attribute_map: Optional[Dict[str, str]], function_map: Optional[Dict[str, str]], ): self.attribute_map = attribute_map self.function_map = function_map @handle( ast.Condition, ast.Comparison, ast.TemporalPredicate, ast.SpatialComparisonPredicate, ast.Arithmetic, ast.ArrayPredicate, subclasses=True, ) def comparison(self, node, *args): op = get_op(node) return {"op": op, "args": [*args]} @handle(ast.Between) def between(self, node, lhs, low, high): return {"op": "between", "args": [lhs, [low, high]]} @handle(ast.Like) def like(self, node, *subargs): return {"op": "like", "args": [subargs[0], node.pattern]} @handle(ast.IsNull) def isnull(self, node, arg): return {"op": "isNull", "args": [arg]} @handle(ast.Function) def function(self, node, *args): name = node.name.lower() if name == "lower": ret = {"lower": args[0]} elif name == "upper": ret = {"upper": args[0]} else: ret = {"function": name, "args": [*args]} return ret @handle(ast.In) def in_(self, node, lhs, *options): return {"op": "in", "args": 
[lhs, options]} @handle(ast.Attribute) def attribute(self, node: ast.Attribute): return {"property": node.name} @handle(values.Interval) def interval(self, node: values.Interval, start, end): return {"interval": [start, end]} @handle(datetime) def datetime(self, node: ast.Attribute): return {"timestamp": node.name} @handle(*values.LITERALS) def literal(self, node): return node @handle(values.Geometry) def geometry(self, node: values.Geometry): return node.__geo_interface__ @handle(values.Envelope) def envelope(self, node: values.Envelope): return node.__geo_interface__ def to_cql2( root: ast.Node, field_mapping: Optional[Dict[str, str]] = None, function_map: Optional[Dict[str, str]] = None, ) -> str: return json.dumps( CQL2Evaluator(field_mapping, function_map).evaluate(root), default=json_serializer, ) pygeofilter-0.3.1/pygeofilter/backends/django/000077500000000000000000000000001473475122500214235ustar00rootroot00000000000000pygeofilter-0.3.1/pygeofilter/backends/django/__init__.py000066400000000000000000000000711473475122500235320ustar00rootroot00000000000000from .evaluate import to_filter __all__ = ["to_filter"] pygeofilter-0.3.1/pygeofilter/backends/django/evaluate.py000066400000000000000000000126561473475122500236150ustar00rootroot00000000000000# ------------------------------------------------------------------------------ # # Project: pygeofilter # Authors: Fabian Schindler # # ------------------------------------------------------------------------------ # Copyright (C) 2019 EOX IT Services GmbH # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above 
copyright notice and this permission notice shall be included in # all copies of this Software or works derived from this Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. # ------------------------------------------------------------------------------ import json from django.contrib.gis.geos import GEOSGeometry, Polygon from ... import ast, values from ..evaluator import Evaluator, handle from . import filters class DjangoFilterEvaluator(Evaluator): def __init__(self, field_mapping, mapping_choices): self.field_mapping = field_mapping self.mapping_choices = mapping_choices @handle(ast.Not) def not_(self, node, sub): return filters.negate(sub) @handle(ast.And, ast.Or) def combination(self, node, lhs, rhs): return filters.combine((lhs, rhs), node.op.value) @handle(ast.Comparison, subclasses=True) def comparison(self, node, lhs, rhs): return filters.compare(lhs, rhs, node.op.value, self.mapping_choices) @handle(ast.Between) def between(self, node, lhs, low, high): return filters.between(lhs, low, high, node.not_) @handle(ast.Like) def like(self, node, lhs): return filters.like( lhs, node.pattern, node.nocase, node.not_, self.mapping_choices ) @handle(ast.In) def in_(self, node, lhs, *options): return filters.contains(lhs, options, node.not_, self.mapping_choices) @handle(ast.IsNull) def null(self, node, lhs): return filters.null(lhs, node.not_) # @handle(ast.ExistsPredicateNode) # def exists(self, node, lhs): # if self.use_getattr: # result = hasattr(self.obj, node.lhs.name) # else: # result = lhs in self.obj # if node.not_: # result = 
not result # return result @handle(ast.TemporalPredicate, subclasses=True) def temporal(self, node, lhs, rhs): return filters.temporal( lhs, rhs, node.op.value, ) @handle(ast.SpatialComparisonPredicate, subclasses=True) def spatial_operation(self, node, lhs, rhs): return filters.spatial( lhs, rhs, node.op.name, ) @handle(ast.Relate) def spatial_pattern(self, node, lhs, rhs): return filters.spatial_relate( lhs, rhs, pattern=node.pattern, ) @handle(ast.SpatialDistancePredicate, subclasses=True) def spatial_distance(self, node, lhs, rhs): return filters.spatial_distance( lhs, rhs, node.op.value, distance=node.distance, units=node.units, ) @handle(ast.BBox) def bbox(self, node, lhs): return filters.bbox(lhs, node.minx, node.miny, node.maxx, node.maxy, node.crs) @handle(ast.Attribute) def attribute(self, node): return filters.attribute(node.name, self.field_mapping) @handle(ast.Arithmetic, subclasses=True) def arithmetic(self, node, lhs, rhs): return filters.arithmetic(lhs, rhs, node.op.value) # TODO: map functions # @handle(ast.FunctionExpressionNode) # def function(self, node, *arguments): # return self.function_map[node.name](*arguments) @handle(*values.LITERALS) def literal(self, node): return filters.literal(node) @handle(values.Interval) def interval(self, node, start, end): return filters.literal((start, end)) @handle(values.Geometry) def geometry(self, node): return GEOSGeometry(json.dumps(node.__geo_interface__)) @handle(values.Envelope) def envelope(self, node): return Polygon.from_bbox((node.x1, node.y1, node.x2, node.y2)) def to_filter(root, field_mapping=None, mapping_choices=None): """Helper function to translate ECQL AST to Django Query expressions. :param ast: the abstract syntax tree :param field_mapping: a dict mapping from the filter name to the Django field lookup. :param mapping_choices: a dict mapping field lookups to choices. 
:type ast: :class:`Node` :returns: a Django query object :rtype: :class:`django.db.models.Q` """ return DjangoFilterEvaluator(field_mapping, mapping_choices).evaluate(root) pygeofilter-0.3.1/pygeofilter/backends/django/filters.py000066400000000000000000000423671473475122500234610ustar00rootroot00000000000000# ------------------------------------------------------------------------------ # # Project: pygeofilter # Authors: Fabian Schindler # # ------------------------------------------------------------------------------ # Copyright (C) 2019 EOX IT Services GmbH # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies of this Software or works derived from this Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. 
# ------------------------------------------------------------------------------ from datetime import datetime, timedelta from functools import reduce from operator import add, and_, mul, or_, sub, truediv from typing import Dict, List, Optional, Union from django.contrib.gis.gdal import SpatialReference from django.contrib.gis.geos import Polygon from django.contrib.gis.measure import D from django.db.models import F, Q, Value from django.db.models.expressions import Expression ArithmeticType = Union[Expression, F, Value, int, float] # ------------------------------------------------------------------------------ # Filters # ------------------------------------------------------------------------------ def combine(sub_filters: List[Q], combinator: str = "AND") -> Q: """Combine filters using a logical combinator""" op = and_ if combinator == "AND" else or_ return reduce(lambda acc, q: op(acc, q) if acc else q, sub_filters) def negate(sub_filter: Q) -> Q: """Negate a filter, opposing its meaning.""" return ~sub_filter OP_TO_COMP = {"<": "lt", "<=": "lte", ">": "gt", ">=": "gte", "<>": None, "=": "exact"} INVERT_COMP: Dict[Optional[str], str] = { "lt": "gt", "lte": "gte", "gt": "lt", "gte": "lte", } def compare( lhs: Union[F, Value], rhs: Union[F, Value], op: str, mapping_choices: Optional[Dict[str, Dict[str, str]]] = None, ) -> Q: """Compare a filter with an expression using a comparison operation :param lhs: the field to compare :type lhs: :class:`django.db.models.F` :param rhs: the filter expression :type rhs: :class:`django.db.models.F` :param op: a string denoting the operation. one of ``"<"``, ``"<="``, ``">"``, ``">="``, ``"<>"``, ``"="`` :type op: str :param mapping_choices: a dict to lookup potential choices for a certain field. 
:type mapping_choices: dict[str, str] :return: a comparison expression object :rtype: :class:`django.db.models.Q` """ comp = OP_TO_COMP[op] # if the left hand side is not a field reference, the comparison # can be be inverted to try if the right hand side is a field # reference. if not isinstance(lhs, F): lhs, rhs = rhs, lhs comp = INVERT_COMP.get(comp, comp) # if neither lhs and rhs are fields, we have to fail here if not isinstance(lhs, F): raise ValueError(f"Unable to compare non-field {lhs}") field_name = lhs.name if mapping_choices and field_name in mapping_choices: try: if isinstance(rhs, str): rhs = mapping_choices[field_name][rhs] elif hasattr(rhs, "value"): rhs = Value(mapping_choices[field_name][rhs.value]) except KeyError as e: raise AssertionError("Invalid field value %s" % e) if comp: return Q(**{"%s__%s" % (lhs.name, comp): rhs}) return ~Q(**{field_name: rhs}) def between( lhs: F, low: Union[F, Value], high: Union[F, Value], not_: bool = False ) -> Q: """Create a filter to match elements that have a value within a certain range. :param lhs: the field to compare :type lhs: :class:`django.db.models.F` :param low: the lower value of the range :type low: :param high: the upper value of the range :type high: :param not_: whether the range shall be inclusive (the default) or exclusive :type not_: bool :return: a comparison expression object :rtype: :class:`django.db.models.Q` """ q = Q(**{"%s__range" % lhs.name: (low, high)}) return ~q if not_ else q def like( lhs: F, pattern: str, nocase: bool = False, not_: bool = False, mapping_choices: Optional[Dict[str, Dict[str, str]]] = None, ) -> Q: """Create a filter to filter elements according to a string attribute using wildcard expressions. :param lhs: the field to compare :type lhs: :class:`django.db.models.F` :param rhs: the wildcard pattern: a string containing any number of '%' characters as wildcards. 
:type rhs: str :param case: whether the lookup shall be done case sensitively or not :type case: bool :param not_: whether the range shall be inclusive (the default) or exclusive :type not_: bool :param mapping_choices: a dict to lookup potential choices for a certain field. :type mapping_choices: dict[str, str] :return: a comparison expression object :rtype: :class:`django.db.models.Q` """ parts = pattern.split("%") length = len(parts) if mapping_choices and lhs.name in mapping_choices: # special case when choices are given for the field: # compare statically and use 'in' operator to check if contained cmp_av = [ (a, a.lower() if nocase else a) for a in mapping_choices[lhs.name].keys() ] for idx, part in enumerate(parts): if not part: continue cmp_p = part.lower() if nocase else part if idx == 0 and length > 1: # startswith cmp_av = [a for a in cmp_av if a[1].startswith(cmp_p)] elif idx == 0: # exact matching cmp_av = [a for a in cmp_av if a[1] == cmp_p] elif idx == length - 1: # endswith cmp_av = [a for a in cmp_av if a[1].endswith(cmp_p)] else: # middle cmp_av = [a for a in cmp_av if cmp_p in a[1]] q = Q( **{"%s__in" % lhs.name: [mapping_choices[lhs.name][a[0]] for a in cmp_av]} ) else: i = "i" if nocase else "" q = None for idx, part in enumerate(parts): if not part: continue if idx == 0 and length > 1: # startswith new_q = Q(**{"%s__%s" % (lhs.name, "%sstartswith" % i): part}) elif idx == 0: # exact matching new_q = Q(**{"%s__%s" % (lhs.name, "%sexact" % i): part}) elif idx == length - 1: # endswith new_q = Q(**{"%s__%s" % (lhs.name, "%sendswith" % i): part}) else: # middle new_q = Q(**{"%s__%s" % (lhs.name, "%scontains" % i): part}) q = q & new_q if q else new_q return ~q if not_ else q def contains( lhs: F, items: List[Union[F, Value]], not_: bool = False, mapping_choices: Optional[Dict[str, Dict[str, str]]] = None, ) -> Q: """Create a filter to match elements attribute to be in a list of choices. 
:param lhs: the field to compare :type lhs: :class:`django.db.models.F` :param items: a list of choices :type items: list :param not_: whether the range shall be inclusive (the default) or exclusive :type not_: bool :param mapping_choices: a dict to lookup potential choices for a certain field. :type mapping_choices: dict[str, str] :return: a comparison expression object :rtype: :class:`django.db.models.Q` """ if mapping_choices is not None and lhs.name in mapping_choices: def map_value( item: Union[str, Value], choices: Dict[str, str] ) -> Union[str, Value]: try: if isinstance(item, str): item = choices[item] elif isinstance(item, Value): item = Value(choices[item.value]) except KeyError as e: raise AssertionError("Invalid field value %s" % e) return item items = [map_value(item, mapping_choices[lhs.name]) for item in items] q = Q(**{"%s__in" % lhs.name: items}) return ~q if not_ else q def null(lhs: F, not_: bool = False) -> Q: """Create a filter to match elements whose attribute is (not) null :param lhs: the field to compare :type lhs: :class:`django.db.models.F` :param not_: whether the range shall be inclusive (the default) or exclusive :type not_: bool :return: a comparison expression object :rtype: :class:`django.db.models.Q` """ return Q(**{"%s__isnull" % lhs.name: not not_}) def temporal(lhs: F, time_or_period: Value, op: str) -> Q: """Create a temporal filter for the given temporal attribute. :param lhs: the field to compare :type lhs: :class:`django.db.models.F` :param time_or_period: the time instant or time span to use as a filter :type time_or_period: :class:`datetime.datetime` or a tuple of two datetimes or a tuple of one datetime and one :class:`datetime.timedelta` :param op: the comparison operation. one of ``"BEFORE"``, ``"BEFORE OR DURING"``, ``"DURING"``, ``"DURING OR AFTER"``, ``"AFTER"``. 
:type op: str :return: a comparison expression object :rtype: :class:`django.db.models.Q` """ assert op in ("BEFORE", "BEFORE OR DURING", "DURING", "DURING OR AFTER", "AFTER") time_or_period = time_or_period.value low: Union[datetime, timedelta, None] = None high: Union[datetime, timedelta, None] = None if op in ("BEFORE", "AFTER"): assert isinstance(time_or_period, datetime) if op == "BEFORE": high = time_or_period else: low = time_or_period else: low, high = time_or_period low = low.value if isinstance(low, Value) else low high = high.value if isinstance(high, Value) else high assert isinstance(low, datetime) or isinstance(high, datetime) if isinstance(low, timedelta) and isinstance(high, datetime): low = high - low if isinstance(low, datetime) and isinstance(high, timedelta): high = low + high if low and high: return Q(**{"%s__range" % lhs.name: (low, high)}) elif low: return Q(**{"%s__gte" % lhs.name: low}) else: return Q(**{"%s__lte" % lhs.name: high}) def time_interval( time_or_period: Value, containment: str = "overlaps", begin_time_field: str = "begin_time", end_time_field: str = "end_time", ) -> Q: """ """ gt_op = "__gte" lt_op = "__lte" is_slice = len(time_or_period) == 1 if len(time_or_period) == 1: is_slice = True value = time_or_period[0] else: is_slice = False low, high = time_or_period if is_slice or (high == low and containment == "overlaps"): return Q( **{ begin_time_field + "__lte": time_or_period[0], end_time_field + "__gte": time_or_period[0], } ) elif high == low: return Q(**{begin_time_field + "__gte": value, end_time_field + "__lte": value}) else: q = Q() # check if the temporal bounds must be strictly contained if containment == "contains": if high is not None: q &= Q(**{end_time_field + lt_op: high}) if low is not None: q &= Q(**{begin_time_field + gt_op: low}) # or just overlapping else: if high is not None: q &= Q(**{begin_time_field + lt_op: high}) if low is not None: q &= Q(**{end_time_field + gt_op: low}) return q UNITS_LOOKUP = 
{"kilometers": "km", "meters": "m"} INVERT_SPATIAL_OP = { "WITHIN": "CONTAINS", "CONTAINS": "WITHIN", } def spatial( lhs: Union[F, Value], rhs: Union[F, Value], op: str, pattern: Optional[str] = None, distance: Optional[float] = None, units: Optional[str] = None, ) -> Q: """Create a spatial filter for the given spatial attribute. :param lhs: the field to compare :type lhs: :class:`django.db.models.F` :param rhs: the time instant or time span to use as a filter :type rhs: :param op: the comparison operation. one of ``"INTERSECTS"``, ``"DISJOINT"``, `"CONTAINS"``, ``"WITHIN"``, ``"TOUCHES"``, ``"CROSSES"``, ``"OVERLAPS"``, ``"EQUALS"``, ``"RELATE"``, ``"DWITHIN"``, ``"BEYOND"`` :type op: str :param pattern: the spatial relation pattern :type pattern: str :param distance: the distance value for distance based lookups: ``"DWITHIN"`` and ``"BEYOND"`` :type distance: float :param units: the units the distance is expressed in :type units: str :return: a comparison expression object :rtype: :class:`django.db.models.Q` """ assert op in ( "INTERSECTS", "DISJOINT", "CONTAINS", "WITHIN", "TOUCHES", "CROSSES", "OVERLAPS", "EQUALS", "RELATE", "DWITHIN", "BEYOND", ) # if the left hand side is not a field reference, the comparison # can be be inverted to try if the right hand side is a field # reference. 
if not isinstance(lhs, F): lhs, rhs = rhs, lhs op = INVERT_SPATIAL_OP.get(op, op) # if neither lhs and rhs are fields, we have to fail here if not isinstance(lhs, F): raise ValueError(f"Unable to compare non-field {lhs}") return Q(**{"%s__%s" % (lhs.name, op.lower()): rhs}) def spatial_relate(lhs: Union[F, Value], rhs: Union[F, Value], pattern: str) -> Q: if not isinstance(lhs, F): # TODO: cannot yet invert pattern -> raise raise ValueError(f"Unable to compare non-field {lhs}") return Q(**{"%s__relate" % lhs.name: (rhs, pattern)}) def spatial_distance( lhs: Union[F, Value], rhs: Union[F, Value], op: str, distance: float, units: str ) -> Q: if not isinstance(lhs, F): lhs, rhs = rhs, lhs # if neither lhs and rhs are fields, we have to fail here if not isinstance(lhs, F): raise ValueError(f"Unable to compare non-field {lhs}") # TODO: maybe use D.unit_attname(units) d = D(**{UNITS_LOOKUP[units]: distance}) if op == "DWITHIN": return Q(**{"%s__distance_lte" % lhs.name: (rhs, d, "spheroid")}) return Q(**{"%s__distance_gte" % lhs.name: (rhs, d, "spheroid")}) def bbox( lhs: F, minx: float, miny: float, maxx, maxy: float, crs: Optional[str] = None, bboverlaps: bool = True, ) -> Q: """Create a bounding box filter for the given spatial attribute. 
:param lhs: the field to compare :param minx: the lower x part of the bbox :type minx: float :param miny: the lower y part of the bbox :type miny: float :param maxx: the upper x part of the bbox :type maxx: float :param maxy: the upper y part of the bbox :type maxy: float :param crs: the CRS the bbox is expressed in :type crs: str :type lhs: :class:`django.db.models.F` :return: a comparison expression object :rtype: :class:`django.db.models.Q` """ box = Polygon.from_bbox((minx, miny, maxx, maxy)) if crs: box.srid = SpatialReference(crs).srid box.transform(4326) if bboverlaps: return Q(**{"%s__bboverlaps" % lhs.name: box}) return Q(**{"%s__intersects" % lhs.name: box}) def attribute(name: str, field_mapping: Optional[Dict[str, str]] = None) -> F: """Create an attribute lookup expression using a field mapping dictionary. :param name: the field filter name :type name: str :param field_mapping: the dictionary to use as a lookup. :rtype: :class:`django.db.models.F` """ if field_mapping: field = field_mapping.get(name, name) else: field = name return F(field) def literal(value) -> Value: return Value(value) OP_TO_FUNC = {"+": add, "-": sub, "*": mul, "/": truediv} def arithmetic(lhs: ArithmeticType, rhs: ArithmeticType, op: str) -> ArithmeticType: """Create an arithmetic filter :param lhs: left hand side of the arithmetic expression. either a scalar or a field lookup or another type of expression :param rhs: same as `lhs` :param op: the arithmetic operation. 
one of ``"+"``, ``"-"``, ``"*"``, ``"/"`` :rtype: :class:`django.db.models.F` """ func = OP_TO_FUNC[op] return func(lhs, rhs) pygeofilter-0.3.1/pygeofilter/backends/elasticsearch/000077500000000000000000000000001473475122500227735ustar00rootroot00000000000000pygeofilter-0.3.1/pygeofilter/backends/elasticsearch/__init__.py000066400000000000000000000030611473475122500251040ustar00rootroot00000000000000# ------------------------------------------------------------------------------ # # Project: pygeofilter # Authors: Fabian Schindler # # ------------------------------------------------------------------------------ # Copyright (C) 2022 EOX IT Services GmbH # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies of this Software or works derived from this Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. # ------------------------------------------------------------------------------ """ Elasticsearch backend for pygeofilter. 
""" from .evaluate import to_filter __all__ = ["to_filter"] pygeofilter-0.3.1/pygeofilter/backends/elasticsearch/evaluate.py000066400000000000000000000235411473475122500251600ustar00rootroot00000000000000# ------------------------------------------------------------------------------ # # Project: pygeofilter # Authors: Fabian Schindler # # ------------------------------------------------------------------------------ # Copyright (C) 2022 EOX IT Services GmbH # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies of this Software or works derived from this Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. # ------------------------------------------------------------------------------ """ Elasticsearch filter evaluator. Uses elasticsearch-dsl package to create filter objects. """ # pylint: disable=E1130,C0103,W0223 from datetime import date, datetime from typing import Dict, Optional, Union from elasticsearch_dsl import Q from packaging.version import Version from ... 
import ast, values from ..evaluator import Evaluator, handle from .util import like_to_wildcard VERSION_7_10_0 = Version("7.10.0") COMPARISON_OP_MAP = { ast.ComparisonOp.LT: "lt", ast.ComparisonOp.LE: "lte", ast.ComparisonOp.GT: "gt", ast.ComparisonOp.GE: "gte", } ARITHMETIC_OP_MAP = { ast.ArithmeticOp.ADD: "+", ast.ArithmeticOp.SUB: "-", ast.ArithmeticOp.MUL: "*", ast.ArithmeticOp.DIV: "/", } class ElasticSearchDSLEvaluator(Evaluator): """A filter evaluator for Elasticsearch DSL.""" def __init__( self, attribute_map: Optional[Dict[str, str]] = None, version: Optional[Version] = None, ): self.attribute_map = attribute_map self.version = version or Version("7.1.0") @handle(ast.Not) def not_(self, _, sub): """Inverts a filter object.""" return ~sub @handle(ast.And) def and_(self, _, lhs, rhs): """Joins two filter objects with an `and` operator.""" return lhs & rhs @handle(ast.Or) def or_(self, _, lhs, rhs): """Joins two filter objects with an `or` operator.""" return lhs | rhs @handle(ast.Equal, ast.NotEqual) def equality(self, node, lhs, rhs): """Creates a match filter.""" q = Q("match", **{lhs: rhs}) if node.op == ast.ComparisonOp.NE: q = ~q return q @handle(ast.LessThan, ast.LessEqual, ast.GreaterThan, ast.GreaterEqual) def comparison(self, node, lhs, rhs): """Creates a `range` filter.""" return Q("range", **{lhs: {COMPARISON_OP_MAP[node.op]: rhs}}) @handle(ast.Between) def between(self, node: ast.Between, lhs, low, high): """Creates a `range` filter.""" q = Q("range", **{lhs: {"gte": low, "lte": high}}) if node.not_: q = ~q return q @handle(ast.Like) def like(self, node: ast.Like, lhs): """Transforms the provided LIKE pattern to an Elasticsearch wildcard pattern. Thus, this only works properly on "wildcard" fields. Ignores case-sensitivity when Elasticsearch version is below 7.10.0. 
""" pattern = like_to_wildcard( node.pattern, node.wildcard, node.singlechar, node.escapechar ) expr: Dict[str, Union[str, bool]] = { "value": pattern, } if self.version >= VERSION_7_10_0: expr["case_insensitive"] = node.nocase q = Q("wildcard", **{lhs: expr}) if node.not_: q = ~q return q @handle(ast.In) def in_(self, node, lhs, *options): """Creates a `terms` filter.""" q = Q("terms", **{lhs: options}) if node.not_: q = ~q return q @handle(ast.IsNull) def null(self, node: ast.IsNull, lhs): """Performs a null check, by using the `exists` query on the given field. """ q = Q("exists", field=lhs) if not node.not_: q = ~q return q @handle(ast.Exists) def exists(self, node: ast.Exists, lhs): """Performs an existense check, by using the `exists` query on the given field """ q = Q("exists", field=lhs) if node.not_: q = ~q return q @handle(ast.TemporalPredicate, subclasses=True) def temporal(self, node: ast.TemporalPredicate, lhs, rhs): """Creates a filter to match the given temporal predicate""" op = node.op if isinstance(rhs, (date, datetime)): low = high = rhs else: low, high = rhs query = "range" not_ = False predicate: Dict[str, Union[date, datetime, str]] if op == ast.TemporalComparisonOp.DISJOINT: not_ = True predicate = {"gte": low, "lte": high} elif op == ast.TemporalComparisonOp.AFTER: predicate = {"gt": high} elif op == ast.TemporalComparisonOp.BEFORE: predicate = {"lt": low} elif ( op == ast.TemporalComparisonOp.TOVERLAPS or op == ast.TemporalComparisonOp.OVERLAPPEDBY ): predicate = {"gte": low, "lte": high} elif op == ast.TemporalComparisonOp.BEGINS: query = "term" predicate = {"value": low} elif op == ast.TemporalComparisonOp.BEGUNBY: query = "term" predicate = {"value": high} elif op == ast.TemporalComparisonOp.DURING: predicate = {"gt": low, "lt": high, "relation": "WITHIN"} elif op == ast.TemporalComparisonOp.TCONTAINS: predicate = {"gt": low, "lt": high, "relation": "CONTAINS"} # elif op == ast.TemporalComparisonOp.ENDS: # pass # elif op == 
ast.TemporalComparisonOp.ENDEDBY: # pass # elif op == ast.TemporalComparisonOp.TEQUALS: # pass # elif op == ast.TemporalComparisonOp.BEFORE_OR_DURING: # pass # elif op == ast.TemporalComparisonOp.DURING_OR_AFTER: # pass else: raise NotImplementedError(f"Unsupported temporal operator: {op}") q = Q( query, **{lhs: predicate}, ) if not_: q = ~q return q @handle( ast.GeometryIntersects, ast.GeometryDisjoint, ast.GeometryWithin, ast.GeometryContains, ) def spatial_comparison(self, node: ast.SpatialComparisonPredicate, lhs: str, rhs): """Creates a geo_shape query for the give spatial comparison predicate. """ return Q( "geo_shape", **{ lhs: { "shape": rhs, "relation": node.op.value.lower(), }, }, ) @handle(ast.BBox) def bbox(self, node: ast.BBox, lhs): """Performs a geo_shape query for the given bounding box. Ignores CRS parameter, as it is not supported by Elasticsearch. """ return Q( "geo_shape", **{ lhs: { "shape": self.envelope( values.Envelope(node.minx, node.maxx, node.miny, node.maxy) ), "relation": "intersects", }, }, ) @handle(ast.Attribute) def attribute(self, node: ast.Attribute): """Attribute mapping from filter fields to elasticsearch fields. If an attribute mapping is provided, it is used to look up the field name from there. 
""" if self.attribute_map is not None: return self.attribute_map[node.name] return node.name # @handle(ast.Arithmetic, subclasses=True) # def arithmetic(self, node: ast.Arithmetic, lhs, rhs): # op = ARITHMETIC_OP_MAP[node.op] # return f"({lhs} {op} {rhs})" # @handle(ast.Function) # def function(self, node, *arguments): # func = self.function_map[node.name] # return f"{func}({','.join(arguments)})" @handle(*values.LITERALS) def literal(self, node): """Literal values are directly passed to elasticsearch-dsl""" return node @handle(values.Geometry) def geometry(self, node: values.Geometry): """Geometry values are converted to a GeoJSON object""" return node.geometry @handle(values.Envelope) def envelope(self, node: values.Envelope): """Envelope values are converted to an GeoJSON Elasticsearch extension object.""" return { "type": "envelope", "coordinates": [ [ min(node.x1, node.x2), max(node.y1, node.y2), ], [ max(node.x1, node.x2), min(node.y1, node.y2), ], ], } def to_filter( root, attribute_map: Optional[Dict[str, str]] = None, version: Optional[str] = None, ): """Shorthand function to convert a pygeofilter AST to an Elasticsearch filter structure. 
""" return ElasticSearchDSLEvaluator( attribute_map, Version(version) if version else None ).evaluate(root) pygeofilter-0.3.1/pygeofilter/backends/elasticsearch/util.py000066400000000000000000000043231473475122500243240ustar00rootroot00000000000000# ------------------------------------------------------------------------------ # # Project: pygeofilter # Authors: Fabian Schindler # # ------------------------------------------------------------------------------ # Copyright (C) 2022 EOX IT Services GmbH # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies of this Software or works derived from this Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. # ------------------------------------------------------------------------------ """ General utilities for the Elasticsearch backend. """ import re def like_to_wildcard( value: str, wildcard: str, single_char: str, escape_char: str = "\\" ) -> str: """Adapts a "LIKE" pattern to create an elasticsearch "wildcard" pattern. 
""" x_wildcard = re.escape(wildcard) x_single_char = re.escape(single_char) if escape_char == "\\": x_escape_char = "\\\\\\\\" else: x_escape_char = re.escape(escape_char) if wildcard != "*": value = re.sub( f"(? # Authors: Fabian Schindler # # ------------------------------------------------------------------------------ # Copyright (C) 2021 EOX IT Services GmbH # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies of this Software or works derived from this Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. # ------------------------------------------------------------------------------ from functools import wraps from typing import Any, Callable, Dict, List, Type, cast from .. import ast def get_all_subclasses(*classes: Type) -> List[Type]: """Utility function to get all the leaf-classes (classes that don't have any further sub-classes) from a given list of classes. 
def get_all_subclasses(*classes: Type) -> List[Type]:
    """Collects the leaf classes (classes without any further
    sub-classes) reachable from the given classes.
    """
    leaves: List[Type] = []
    for cls in classes:
        children = cls.__subclasses__()
        if children:
            leaves.extend(get_all_subclasses(*children))
        else:
            # a class without sub-classes is itself a leaf
            leaves.append(cls)
    return leaves


def handle(*node_classes: Type, subclasses: bool = False) -> Callable:
    """Function-decorator to mark a class function as a handler for a
    given node type, optionally for all of its leaf sub-classes as well.
    """
    assert node_classes

    @wraps(handle)
    def inner(func):
        func.handles_classes = (
            get_all_subclasses(*node_classes) if subclasses else node_classes
        )
        return func

    return inner


class EvaluatorMeta(type):
    """Metaclass for the ``Evaluator`` class: collects every
    ``@handle``-decorated member of the class (and its bases) into a
    static ``handler_map`` keyed by handled node type.
    """

    def __init__(cls, name, bases, dct):
        handlers: Dict[Type, Callable] = {}
        for base in bases:
            handlers.update(getattr(base, "handler_map"))
        for member in dct.values():
            for handled_class in getattr(member, "handles_classes", ()):
                handlers[handled_class] = member
        cls.handler_map = handlers


class Evaluator(metaclass=EvaluatorMeta):
    """Base class for AST evaluators."""

    handler_map: Dict[Type, Callable]

    def evaluate(self, node: ast.AstType, adopt_result: bool = True) -> Any:
        """Recursive function to evaluate an abstract syntax tree.

        Child nodes are evaluated first; the node's registered handler is
        then called with the node itself and all pre-evaluated children as
        star-arguments. When no handler is registered for the node's type,
        ``adopt`` is called instead, which by default raises
        ``NotImplementedError``.
        """
        child_results = []
        if hasattr(node, "get_sub_nodes"):
            children = cast(ast.Node, node).get_sub_nodes()
            if children:
                if not isinstance(children, list):
                    children = [children]
                child_results = [
                    self.evaluate(child, False) for child in children
                ]

        handler = self.handler_map.get(type(node))
        if handler is not None:
            result = handler(self, node, *child_results)
        else:
            result = self.adopt(node, *child_results)

        return self.adopt_result(result) if adopt_result else result

    def adopt(self, node, *sub_args):
        """Last-resort hook invoked when no handler matches the node type."""
        raise NotImplementedError(f"Failed to evaluate node of type {type(node)}")

    def adopt_result(self, result: Any) -> Any:
        """Hook for adopting the final evaluation result. Default is no-op."""
        return result
from datetime import date, datetime, time, timedelta

from shapely import geometry

from ... import ast, values
from ..evaluator import Evaluator, handle
from . import filters

# NOTE(review): this module-level tuple appears unused here (the literal
# handler below relies on ``values.LITERALS``); kept for compatibility.
LITERALS = (str, float, int, bool, datetime, date, time, timedelta)


class GeoPandasEvaluator(Evaluator):
    """Evaluates a pygeofilter AST into a boolean mask over a GeoPandas
    GeoDataFrame, delegating each node type to the ``filters`` helpers.

    Args:
        df: the (Geo)DataFrame to filter.
        field_mapping: optional mapping of external attribute names to
            DataFrame column names.
        function_map: optional mapping of function names to callables,
            used for ``ast.Function`` nodes.
    """

    def __init__(self, df, field_mapping=None, function_map=None):
        self.df = df
        self.field_mapping = field_mapping
        self.function_map = function_map

    @handle(ast.Not)
    def not_(self, node, sub):
        """Negates the wrapped filter mask."""
        return filters.negate(sub)

    @handle(ast.And, ast.Or)
    def combination(self, node, lhs, rhs):
        """Combines two filter masks with AND/OR."""
        return filters.combine((lhs, rhs), node.op.value)

    @handle(ast.Comparison, subclasses=True)
    def comparison(self, node, lhs, rhs):
        """Applies a binary comparison operator."""
        return filters.compare(
            lhs,
            rhs,
            node.op.value,
        )

    @handle(ast.Between)
    def between(self, node, lhs, low, high):
        """Applies an (optionally negated) BETWEEN check."""
        return filters.between(lhs, low, high, node.not_)

    @handle(ast.Like)
    def like(self, node, lhs):
        """Applies an (optionally negated) LIKE pattern match."""
        return filters.like(
            lhs,
            node.pattern,
            node.nocase,
            node.wildcard,
            node.singlechar,
            node.escapechar,
            node.not_,
        )

    @handle(ast.In)
    def in_(self, node, lhs, *options):
        """Applies an (optionally negated) IN membership check."""
        return filters.contains(
            lhs,
            options,
            node.not_,
        )

    @handle(ast.IsNull)
    def null(self, node, lhs):
        """Applies an (optionally negated) IS NULL check."""
        return filters.null(
            lhs,
            node.not_,
        )

    @handle(ast.TemporalPredicate, subclasses=True)
    def temporal(self, node, lhs, rhs):
        """Applies a temporal predicate.

        BUGFIX: previously the raw AST nodes (``node.lhs``/``node.rhs``)
        were forwarded instead of the already-evaluated sub-results,
        unlike every other handler in this class.
        """
        return filters.temporal(
            lhs,
            rhs,
            node.op.value,
        )

    @handle(ast.SpatialComparisonPredicate, subclasses=True)
    def spatial_operation(self, node, lhs, rhs):
        """Applies a binary spatial predicate (INTERSECTS, WITHIN, ...)."""
        return filters.spatial(
            lhs,
            rhs,
            node.op.name,
        )

    @handle(ast.BBox)
    def bbox(self, node, lhs):
        """Applies a bounding-box intersection check."""
        return filters.bbox(lhs, node.minx, node.miny, node.maxx, node.maxy, node.crs)

    @handle(ast.Attribute)
    def attribute(self, node):
        """Resolves an attribute to a DataFrame column."""
        return filters.attribute(self.df, node.name, self.field_mapping)

    @handle(ast.Arithmetic, subclasses=True)
    def arithmetic(self, node, lhs, rhs):
        """Applies an arithmetic operator element-wise."""
        return filters.arithmetic(lhs, rhs, node.op.value)

    @handle(ast.Function)
    def function(self, node, *arguments):
        """Dispatches a function call via the configured function map."""
        return self.function_map[node.name](*arguments)

    @handle(*values.LITERALS)
    def literal(self, node):
        """Literal values are passed through unchanged."""
        return node

    @handle(values.Interval)
    def interval(self, node, start, end):
        """Interval values become plain ``(start, end)`` tuples."""
        return (start, end)

    @handle(values.Geometry)
    def geometry(self, node):
        """Geometry values are parsed into shapely geometries."""
        return geometry.shape(node)

    @handle(values.Envelope)
    def envelope(self, node):
        """Envelope values become shapely bounding-box polygons."""
        return geometry.Polygon.from_bounds(node.x1, node.y1, node.x2, node.y2)


def to_filter(df, root, field_mapping=None, function_map=None):
    """Shorthand to evaluate the AST ``root`` against the DataFrame ``df``.

    Args:
        df: the (Geo)DataFrame to filter.
        root: the pygeofilter AST to evaluate.
        field_mapping: optional attribute-name to column-name mapping.
        function_map: optional function-name to callable mapping.

    Returns:
        a boolean mask selecting the matching rows.
    """
    return GeoPandasEvaluator(df, field_mapping, function_map).evaluate(root)
def like(lhs, pattern, nocase, wildcard, singlechar, escapechar, not_):
    """Create a (possibly negated) LIKE-pattern filter over a string Series."""
    regex = like_pattern_to_re(
        pattern, nocase, wildcard, singlechar, escapechar or "\\"
    )
    mask = lhs.str.match(regex)
    return ~mask if not_ else mask


def contains(lhs, items, not_):
    """Create a (possibly negated) membership filter."""
    # TODO: check if dataframe or scalar
    mask = lhs.isin(items)
    return ~mask if not_ else mask


def null(lhs, not_):
    """Create a (possibly negated) null-check filter."""
    mask = lhs.isnull()
    return ~mask if not_ else mask


def temporal(lhs, time_or_period, op):
    """Temporal predicates are not implemented for this backend yet."""
    pass  # TODO implement


# mapping of spatial predicate names to GeoSeries method names
SPATIAL_OP_MAP = {
    "INTERSECTS": "intersects",
    "DISJOINT": "disjoint",
    "CONTAINS": "contains",
    "WITHIN": "within",
    "TOUCHES": "touches",
    "CROSSES": "crosses",
    "OVERLAPS": "overlaps",
    "EQUALS": "geom_equals",
}


def spatial(lhs, rhs, op):
    """Apply the named binary spatial predicate between two geometries."""
    assert op in SPATIAL_OP_MAP
    predicate = getattr(lhs, SPATIAL_OP_MAP[op])
    return predicate(rhs)


def bbox(lhs, minx, miny, maxx, maxy, crs=None):
    """Create a bounding-box intersection filter."""
    box = shapely.geometry.Polygon.from_bounds(minx, miny, maxx, maxy)
    # TODO: handle CRS
    return lhs.intersects(box)


def attribute(df, name, field_mapping=None):
    """Look up a column of ``df``, translating ``name`` through the
    optional ``field_mapping`` first.
    """
    column = field_mapping[name] if field_mapping else name
    return df[column]


# mapping of arithmetic operator symbols to their functions
OP_TO_FUNC = {"+": add, "-": sub, "*": mul, "/": truediv}


def arithmetic(lhs, rhs, op):
    """Create an arithmetic filter"""
    return OP_TO_FUNC[op](lhs, rhs)
copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies of this Software or works derived from this Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. # ------------------------------------------------------------------------------ from datetime import date, datetime, time, timedelta, timezone from typing import Any, Callable, Dict, Optional, Tuple, Union import shapely.geometry from ... import ast, values from ...util import like_pattern_to_re, parse_datetime from ..evaluator import Evaluator, handle COMPARISON_MAP = { ast.ComparisonOp.EQ: "==", ast.ComparisonOp.NE: "!=", ast.ComparisonOp.LT: "<", ast.ComparisonOp.LE: "<=", ast.ComparisonOp.GT: ">", ast.ComparisonOp.GE: ">=", } ARITHMETIC_MAP = { ast.ArithmeticOp.ADD: "+", ast.ArithmeticOp.SUB: "-", ast.ArithmeticOp.MUL: "*", ast.ArithmeticOp.DIV: "/", } ARRAY_COMPARISON_OP_MAP = { ast.ArrayComparisonOp.AEQUALS: "==", ast.ArrayComparisonOp.ACONTAINS: ">=", ast.ArrayComparisonOp.ACONTAINEDBY: "<=", ast.ArrayComparisonOp.AOVERLAPS: "&", } class NativeEvaluator(Evaluator): """This evaluator type allows to create a filter that can be used to filter objects or dicts. 
The filter is built using Python expressions which are then parsed using eval. The result is a callable object that can be used in any circumstance a normal function would. The callable object accepts a single parameter: the object to filter and returns a boolean if the object matches the filters or not. """ def __init__( self, function_map: Optional[Dict[str, Callable]] = None, attribute_map: Optional[Dict[str, str]] = None, use_getattr: bool = True, allow_nested_attributes: bool = True, ): """Constructs a NativeEvaluator. Args: function_map: a mapping of a function name to a callable function. attribute_map: a mapping of an external name to an internal field of the item to be filtered. The internal field specifier can be a JSON-Path that will be resolved against the passed in item. """ self.function_map = function_map if function_map is not None else {} self.attribute_map = attribute_map self.use_getattr = use_getattr self.allow_nested_attributes = allow_nested_attributes self.locals: Dict[str, Any] = {} self.local_count = 0 def _add_local(self, value: Any) -> str: "Add a value as a local variable to the expression." 
self.local_count += 1 key = f"local_{self.local_count}" self.locals[key] = value return key def _resolve_attribute(self, name): """Helper to resolve an attribute, either directly or via the integrated ``attribute_map`` """ if self.attribute_map is not None: if name in self.attribute_map: path = self.attribute_map[name] elif "*" in self.attribute_map: path = self.attribute_map["*"].replace("*", name) allow_nested_attributes = True else: path = name allow_nested_attributes = self.allow_nested_attributes parts = path.split(".") if not allow_nested_attributes and len(parts) > 1: raise Exception("Nested attributes are not allowed") return parts @handle(ast.Not) def not_(self, node, sub): return f"(not {sub})" @handle(ast.And) def and_(self, node, lhs, rhs): return f"({lhs} and {rhs})" @handle(ast.Or) def or_(self, node, lhs, rhs): return f"({lhs} or {rhs})" @handle(ast.Comparison, subclasses=True) def comparison(self, node, lhs, rhs): op = COMPARISON_MAP[node.op] return f"({lhs} {op} {rhs})" @handle(ast.Between) def between(self, node, lhs, low, high): if node.not_: return f"({low} > {lhs} or {lhs} > {high})" else: return f"({low} <= {lhs} <= {high})" @handle(ast.Like) def like(self, node, lhs): maybe_not_inv = "" if node.not_ else "not " regex = like_pattern_to_re( node.pattern, node.nocase, node.wildcard, node.singlechar, node.escapechar ) key = self._add_local(regex) return f"({key}.match({lhs}) is {maybe_not_inv}None)" @handle(ast.In) def in_(self, node, lhs, *options): maybe_not = "not" if node.not_ else "" opts = ", ".join([f"{opt}" for opt in options]) return f"({lhs} {maybe_not} in ({opts}))" @handle(ast.IsNull) def null(self, node, lhs): maybe_not = "not " if node.not_ else "" return f"({lhs} is {maybe_not}None)" @handle(ast.Exists) def exists(self, node, lhs): parts = self._resolve_attribute(node.lhs.name) maybe_not = "not " if node.not_ else "" if self.use_getattr: cur = "item" for part in parts[:-1]: cur = f"getattr({cur}, {part!r}, None)" return 
f"({maybe_not}hasattr({cur}, {parts[-1]!r}))" else: getters = "".join(f".get({part!r}, {{}})" for part in parts[:-1]) return f"{parts[-1]!r} {maybe_not}in item{getters}" @handle(ast.TemporalPredicate, subclasses=True) def temporal(self, node, lhs, rhs): return ( f"(relate_intervals(to_interval({lhs})," f"to_interval({rhs})) == " f"ast.TemporalComparisonOp.{node.op.name})" ) @handle(ast.ArrayPredicate, subclasses=True) def array(self, node, lhs, rhs): op = ARRAY_COMPARISON_OP_MAP[node.op] return f"bool(set({lhs}) {op} set({rhs}))" @handle(ast.SpatialComparisonPredicate, subclasses=True) def spatial_operation(self, node, lhs, rhs): return f"(getattr(ensure_spatial({lhs}), " f"{node.op.value.lower()!r})({rhs}))" @handle(ast.Relate) def spatial_pattern(self, node, lhs, rhs): return f"(ensure_spatial({lhs}).relate_pattern({rhs}, {node.pattern!r}))" @handle(ast.BBox) def bbox(self, node, lhs): bbox_local = self._add_local( shapely.geometry.Polygon.from_bounds( node.minx, node.miny, node.maxx, node.maxy ) ) return f"(ensure_spatial({lhs}).intersects({bbox_local}))" @handle(ast.Attribute) def attribute(self, node): parts = self._resolve_attribute(node.name) if self.use_getattr: cur = "item" for part in parts: cur = f"getattr({cur}, {part!r}, None)" return cur else: getters = "".join(f".get({part!r})" for part in parts) return f"item{getters}" @handle(ast.Arithmetic, subclasses=True) def arithmetic(self, node, lhs, rhs): op = ARITHMETIC_MAP[node.op] return f"({lhs}) {op} ({rhs})" @handle(ast.Function) def function(self, node, *arguments): args = ", ".join([f"({arg})" for arg in arguments]) return f"{node.name}({args})" @handle(*values.LITERALS) def literal(self, node): key = self._add_local(node) return key @handle(values.Interval) def interval(self, node, low, high): return f"values.Interval({low}, {high})" @handle(values.Geometry) def geometry(self, node): key = self._add_local(shapely.geometry.shape(node)) return key @handle(values.Envelope) def envelope(self, node): key 
= self._add_local( shapely.geometry.Polygon.from_bounds(node.x1, node.y1, node.x2, node.y2) ) return key def adopt_result(self, result): """Turns the compiled expression into a callable object using ``eval``. Literals are passed in as well as the function map. """ expression = f"lambda item: {result}" globals_ = { "relate_intervals": relate_intervals, "to_interval": to_interval, "ensure_spatial": ensure_spatial, "ast": ast, "values": values, } if not set(globals_).isdisjoint(set(self.function_map)): raise ValueError( f"globals collision {list(globals_)} and " f"{list(self.function_map)}" ) globals_.update(self.function_map) globals_.update(self.locals) # clear any locals for later use self.locals.clear() return eval(expression, globals_) MaybeInterval = Union[values.Interval, date, datetime, str, None] InternalInterval = Tuple[Optional[datetime], Optional[datetime]] def _interval_to_internal_interval(value: values.Interval) -> InternalInterval: low = value.start high = value.end # convert low and high dates to their respective datetime # by using 00:00 time for the low part and 23:59:59 for the high # part if isinstance(low, date): low = datetime.combine(low, time.min, timezone.utc) if isinstance(high, date): high = datetime.combine(high, time.max, timezone.utc) # low and high are now either datetimes, timedeltas or None if isinstance(low, timedelta): if isinstance(high, datetime): low = high - low else: raise ValueError(f"Cannot combine {low} with {high}") elif isinstance(high, timedelta): if isinstance(low, datetime): high = low + high else: raise ValueError(f"Cannot combine {low} with {high}") return (low, high) def to_interval(value: MaybeInterval) -> InternalInterval: """Converts the given value to an interval tuple of ``start``/``stop`` as Python datetime objects. 
def to_interval(value: MaybeInterval) -> InternalInterval:
    """Converts the given value to an interval tuple of ``start``/``stop``
    as Python datetime objects.

    - ``values.Interval`` objects are expanded via
      ``_interval_to_internal_interval`` (dates are widened to full days,
      timedeltas are resolved against the opposite endpoint).
    - ``str`` objects are parsed as datetimes (assumed UTC when naive) and
      returned as a degenerate instant interval.
    - ``datetime`` objects become a degenerate instant interval.
    - ``date`` objects are widened to ``time.min``/``time.max`` of that
      day in UTC.
    - ``None`` is simply returned as ``(None, None)``.

    Raises:
        ValueError: for any other input type.
    """
    if isinstance(value, str):
        value = parse_datetime(value)
        if not value.tzinfo:
            value = value.replace(tzinfo=timezone.utc)
        return (value, value)
    elif isinstance(value, values.Interval):
        return _interval_to_internal_interval(value)
    elif isinstance(value, datetime):
        return (value, value)
    elif isinstance(value, date):
        return (
            datetime.combine(value, time.min, timezone.utc),
            datetime.combine(value, time.max, timezone.utc),
        )
    elif value is None:
        return (None, None)
    raise ValueError(f"Invalid type {type(value)}")


def relate_intervals(  # noqa: C901
    lhs: InternalInterval, rhs: InternalInterval
) -> ast.TemporalComparisonOp:
    """Relates two intervals (tuples of two ``datetime`` or ``None``
    values) and returns the matching ``ast.TemporalComparisonOp`` member,
    following Allen's interval algebra.

    Open-ended intervals (``None`` on either side) are currently always
    reported as DISJOINT.
    """
    ll, lh = lhs
    rl, rh = rhs

    if ll is None or lh is None or rl is None or rh is None:
        # TODO: handle open ended intervals (None on either side)
        return ast.TemporalComparisonOp.DISJOINT
    elif ll == rl and lh == rh:
        # BUGFIX: equality must be tested before MEETS/METBY. Otherwise two
        # identical degenerate (instant) intervals — as produced by
        # ``to_interval`` for a single datetime — would satisfy ``lh == rl``
        # first and be classified as MEETS, so T_EQUALS comparisons of
        # equal instants could never succeed.
        return ast.TemporalComparisonOp.TEQUALS
    elif lh < rl:
        return ast.TemporalComparisonOp.BEFORE
    elif ll > rh:
        return ast.TemporalComparisonOp.AFTER
    elif lh == rl:
        return ast.TemporalComparisonOp.MEETS
    elif ll == rh:
        return ast.TemporalComparisonOp.METBY
    elif ll < rl and rl < lh < rh:
        return ast.TemporalComparisonOp.TOVERLAPS
    elif rl < ll < rh and lh > rh:
        return ast.TemporalComparisonOp.OVERLAPPEDBY
    elif ll == rl and lh < rh:
        return ast.TemporalComparisonOp.BEGINS
    elif ll == rl and lh > rh:
        return ast.TemporalComparisonOp.BEGUNBY
    elif ll > rl and lh < rh:
        return ast.TemporalComparisonOp.DURING
    elif ll < rl and lh > rh:
        return ast.TemporalComparisonOp.TCONTAINS
    elif ll > rl and lh == rh:
        return ast.TemporalComparisonOp.ENDS
    elif ll < rl and lh == rh:
        return ast.TemporalComparisonOp.ENDEDBY

    raise ValueError(f"Error relating intervals [{ll}, {lh}] and [{rl}, {rh}]")


def ensure_spatial(value: Any) -> shapely.geometry.base.BaseGeometry:
    """Ensures that a given value is a shapely geometry. If it already is,
    it is passed through; otherwise it is parsed via
    ``shapely.geometry.shape``.
    """
    if isinstance(value, shapely.geometry.base.BaseGeometry):
        return value
    return shapely.geometry.shape(value)
""" from .evaluate import to_filter __all__ = ["to_filter"] pygeofilter-0.3.1/pygeofilter/backends/opensearch/evaluate.py000066400000000000000000000234721473475122500245000ustar00rootroot00000000000000# ------------------------------------------------------------------------------ # # Project: pygeofilter # Authors: Fabian Schindler # # ------------------------------------------------------------------------------ # Copyright (C) 2022 EOX IT Services GmbH # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies of this Software or works derived from this Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. # ------------------------------------------------------------------------------ """ OpenSearch filter evaluator. Uses opensearch-dsl package to create filter objects. """ # pylint: disable=E1130,C0103,W0223 from datetime import date, datetime from typing import Dict, Optional, Union from opensearch_dsl import Q from packaging.version import Version from ... 
from ... import ast, values
from ..evaluator import Evaluator, handle
from .util import like_to_wildcard

# first version supporting the ``case_insensitive`` flag on wildcard queries
VERSION_7_10_0 = Version("7.10.0")

COMPARISON_OP_MAP = {
    ast.ComparisonOp.LT: "lt",
    ast.ComparisonOp.LE: "lte",
    ast.ComparisonOp.GT: "gt",
    ast.ComparisonOp.GE: "gte",
}

ARITHMETIC_OP_MAP = {
    ast.ArithmeticOp.ADD: "+",
    ast.ArithmeticOp.SUB: "-",
    ast.ArithmeticOp.MUL: "*",
    ast.ArithmeticOp.DIV: "/",
}


class OpenSearchDSLEvaluator(Evaluator):
    """A filter evaluator translating a pygeofilter AST into
    ``opensearch_dsl`` query objects.
    """

    def __init__(
        self,
        attribute_map: Optional[Dict[str, str]] = None,
        version: Optional[Version] = None,
    ):
        self.attribute_map = attribute_map
        self.version = version or Version("7.1.0")

    @handle(ast.Not)
    def not_(self, _, sub):
        """Inverts the wrapped filter object."""
        return ~sub

    @handle(ast.And)
    def and_(self, _, lhs, rhs):
        """Joins two filter objects so that both must match."""
        return lhs & rhs

    @handle(ast.Or)
    def or_(self, _, lhs, rhs):
        """Joins two filter objects so that either may match."""
        return lhs | rhs

    @handle(ast.Equal, ast.NotEqual)
    def equality(self, node, lhs, rhs):
        """Creates a ``match`` query, inverted for ``<>``."""
        query = Q("match", **{lhs: rhs})
        return ~query if node.op == ast.ComparisonOp.NE else query

    @handle(ast.LessThan, ast.LessEqual, ast.GreaterThan, ast.GreaterEqual)
    def comparison(self, node, lhs, rhs):
        """Creates a ``range`` query for the relational operators."""
        return Q("range", **{lhs: {COMPARISON_OP_MAP[node.op]: rhs}})

    @handle(ast.Between)
    def between(self, node: ast.Between, lhs, low, high):
        """Creates an inclusive ``range`` query, inverted for NOT BETWEEN."""
        query = Q("range", **{lhs: {"gte": low, "lte": high}})
        return ~query if node.not_ else query

    @handle(ast.Like)
    def like(self, node: ast.Like, lhs):
        """Translates the LIKE pattern into an OpenSearch ``wildcard``
        query. Thus, this only works properly on "wildcard" fields.
        Case-insensitivity is only requested for versions >= 7.10.0,
        where the flag was introduced.
        """
        translated = like_to_wildcard(
            node.pattern, node.wildcard, node.singlechar, node.escapechar
        )
        params: Dict[str, Union[str, bool]] = {"value": translated}
        if self.version >= VERSION_7_10_0:
            params["case_insensitive"] = node.nocase
        query = Q("wildcard", **{lhs: params})
        return ~query if node.not_ else query

    @handle(ast.In)
    def in_(self, node, lhs, *options):
        """Creates a ``terms`` query, inverted for NOT IN."""
        query = Q("terms", **{lhs: options})
        return ~query if node.not_ else query

    @handle(ast.IsNull)
    def null(self, node: ast.IsNull, lhs):
        """IS NULL maps to a negated ``exists`` query on the field."""
        query = Q("exists", field=lhs)
        return query if node.not_ else ~query

    @handle(ast.Exists)
    def exists(self, node: ast.Exists, lhs):
        """EXISTS maps directly to an ``exists`` query on the field."""
        query = Q("exists", field=lhs)
        return ~query if node.not_ else query

    @handle(ast.TemporalPredicate, subclasses=True)
    def temporal(self, node: ast.TemporalPredicate, lhs, rhs):
        """Creates a filter matching the given temporal predicate."""
        op = node.op
        if isinstance(rhs, (date, datetime)):
            low = high = rhs
        else:
            low, high = rhs

        query_type = "range"
        negate = False
        predicate: Dict[str, Union[date, datetime, str]]
        if op == ast.TemporalComparisonOp.DISJOINT:
            negate = True
            predicate = {"gte": low, "lte": high}
        elif op == ast.TemporalComparisonOp.AFTER:
            predicate = {"gt": high}
        elif op == ast.TemporalComparisonOp.BEFORE:
            predicate = {"lt": low}
        elif (
            op == ast.TemporalComparisonOp.TOVERLAPS
            or op == ast.TemporalComparisonOp.OVERLAPPEDBY
        ):
            predicate = {"gte": low, "lte": high}
        elif op == ast.TemporalComparisonOp.BEGINS:
            query_type = "term"
            predicate = {"value": low}
        elif op == ast.TemporalComparisonOp.BEGUNBY:
            query_type = "term"
            predicate = {"value": high}
        elif op == ast.TemporalComparisonOp.DURING:
            predicate = {"gt": low, "lt": high, "relation": "WITHIN"}
        elif op == ast.TemporalComparisonOp.TCONTAINS:
            predicate = {"gt": low, "lt": high, "relation": "CONTAINS"}
        # ENDS / ENDEDBY / TEQUALS / BEFORE_OR_DURING / DURING_OR_AFTER are
        # not expressible as simple range/term queries and stay unsupported.
        else:
            raise NotImplementedError(f"Unsupported temporal operator: {op}")

        query = Q(
            query_type,
            **{lhs: predicate},
        )
        return ~query if negate else query

    @handle(
        ast.GeometryIntersects,
        ast.GeometryDisjoint,
        ast.GeometryWithin,
        ast.GeometryContains,
    )
    def spatial_comparison(self, node: ast.SpatialComparisonPredicate, lhs: str, rhs):
        """Creates a ``geo_shape`` query for the given spatial comparison
        predicate.
        """
        return Q(
            "geo_shape",
            **{
                lhs: {
                    "shape": rhs,
                    "relation": node.op.value.lower(),
                },
            },
        )

    @handle(ast.BBox)
    def bbox(self, node: ast.BBox, lhs):
        """Performs a ``geo_shape`` intersects query for the bounding box.
        The CRS parameter is ignored, as it is not supported by OpenSearch.
        """
        shape = self.envelope(
            values.Envelope(node.minx, node.maxx, node.miny, node.maxy)
        )
        return Q(
            "geo_shape",
            **{
                lhs: {
                    "shape": shape,
                    "relation": "intersects",
                },
            },
        )

    @handle(ast.Attribute)
    def attribute(self, node: ast.Attribute):
        """Maps a filter attribute to an OpenSearch field, using the
        attribute map when one was supplied.
        """
        if self.attribute_map is None:
            return node.name
        return self.attribute_map[node.name]

    # @handle(ast.Arithmetic, subclasses=True)
    # def arithmetic(self, node: ast.Arithmetic, lhs, rhs):
    #     op = ARITHMETIC_OP_MAP[node.op]
    #     return f"({lhs} {op} {rhs})"

    # @handle(ast.Function)
    # def function(self, node, *arguments):
    #     func = self.function_map[node.name]
    #     return f"{func}({','.join(arguments)})"

    @handle(*values.LITERALS)
    def literal(self, node):
        """Literal values are directly passed to opensearch-dsl"""
        return node

    @handle(values.Geometry)
    def geometry(self, node: values.Geometry):
        """Geometry values are converted to a GeoJSON object"""
        return node.geometry

    @handle(values.Envelope)
    def envelope(self, node: values.Envelope):
        """Envelope values become the GeoJSON OpenSearch "envelope"
        extension type: an upper-left / lower-right coordinate pair.
        """
        upper_left = [min(node.x1, node.x2), max(node.y1, node.y2)]
        lower_right = [max(node.x1, node.x2), min(node.y1, node.y2)]
        return {
            "type": "envelope",
            "coordinates": [upper_left, lower_right],
        }


def to_filter(
    root,
    attribute_map: Optional[Dict[str, str]] = None,
    version: Optional[str] = None,
):
    """Shorthand function to convert a pygeofilter AST to an OpenSearch
    filter structure.

    Args:
        root: the AST to convert.
        attribute_map: optional mapping of attribute names to field names.
        version: optional OpenSearch version string, used to decide
            feature availability (e.g. case-insensitive wildcards).
    """
    evaluator = OpenSearchDSLEvaluator(
        attribute_map, Version(version) if version else None
    )
    return evaluator.evaluate(root)
""" x_wildcard = re.escape(wildcard) x_single_char = re.escape(single_char) if escape_char == "\\": x_escape_char = "\\\\\\\\" else: x_escape_char = re.escape(escape_char) if wildcard != "*": value = re.sub( f"(? # Authors: Fabian Schindler # # ------------------------------------------------------------------------------ # Copyright (C) 2021 EOX IT Services GmbH # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies of this Software or works derived from this Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. # ------------------------------------------------------------------------------ import operator from datetime import date, datetime, time, timedelta from typing import Callable, Dict, Optional import shapely from .. 
import ast, values from ..util import like_pattern_to_re from .evaluator import Evaluator, handle COMPARISON_MAP = { "=": operator.eq, "<>": operator.ne, "<": operator.lt, "<=": operator.le, ">": operator.gt, ">=": operator.ge, } ARITHMETIC_MAP = { "+": operator.add, "-": operator.sub, "*": operator.mul, "/": operator.truediv, } def is_literal(value): return isinstance(value, values.LITERALS) TEMPORAL_LITERALS = (date, datetime, time, timedelta, values.Interval) def is_temporal_literal(value): return isinstance(value, TEMPORAL_LITERALS) GEOMETRY_LITERALS = (values.Geometry, values.Envelope) def is_geometry_literal(value): return isinstance(value, GEOMETRY_LITERALS) def is_any_literal(value): return is_literal(value) or is_temporal_literal(value) or is_geometry_literal(value) def to_geometry(value): if isinstance(value, values.Geometry): return shapely.geometry.shape(value) elif isinstance(value, values.Envelope): return shapely.geometry.Polygon.from_bounds( value.x1, value.y1, value.x2, value.y2 ) raise ValueError(str(type(value))) class OptimizeEvaluator(Evaluator): def __init__(self, function_map: Dict[str, Callable]): self.function_map = function_map @handle(ast.Not) def not_(self, node, sub): if isinstance(sub, bool): return operator.not_(sub) else: return ast.Not(sub) @handle(ast.And, ast.Or) def combination(self, node, lhs, rhs): if isinstance(lhs, bool) and isinstance(rhs, bool): op = operator.and_ if node.op.value == "AND" else operator.or_ return op(lhs, rhs) elif isinstance(lhs, bool) or isinstance(rhs, bool): if isinstance(lhs, bool): certain, uncertain = lhs, rhs else: certain, uncertain = rhs, lhs # for OR nodes, when we have one true branch, the other # can be dropped. Otherwise we can shorthand to the # uncertain branch if node.op.value == "OR": if certain: return True else: return uncertain # for AND nodes, we can drop the node if the certain one is # false. 
Otherwise we can shorthand to the other elif node.op.value == "AND": if certain: return uncertain else: return False # we can eliminate the whole node and its sub-nodes, as it # will always evaluate to false return False else: return (ast.And if node.op.value == "AND" else ast.Or)(lhs, rhs) @handle(ast.Comparison, subclasses=True) def comparison(self, node, lhs, rhs): if is_literal(lhs) and is_literal(rhs): op = COMPARISON_MAP[node.op.value] return op(lhs, rhs) else: return type(node)(lhs, rhs) @handle(ast.Between) def between(self, node, lhs, low, high): if is_literal(lhs) and is_literal(low) and is_literal(high): result = low <= lhs <= high if node.not_: result = not result return result else: return ast.Between(lhs, low, high, node.not_) @handle(ast.Like) def like(self, node, lhs): if is_literal(lhs): regex = like_pattern_to_re( node.pattern, node.nocase, node.wildcard, node.singlechar, node.escapechar, ) result = regex.match(lhs) is not None if node.not_: result = not result return result else: return ast.Like( lhs, node.pattern, node.nocase, node.wildcard, node.singlechar, node.escapechar, node.not_, ) @handle(ast.In) def in_(self, node, lhs, *options): if is_literal(lhs) and all(is_literal(o) for o in options): result = lhs in options if node.not_: result = not result return result else: return ast.In(lhs, list(options), node.not_) @handle(ast.IsNull) def null(self, node, lhs): return ast.IsNull(lhs, node.not_) @handle(ast.Exists) def exists(self, node, lhs): return ast.Exists(lhs, node.not_) @handle(ast.TemporalPredicate, subclasses=True) def temporal(self, node, lhs, rhs): if is_temporal_literal(lhs) and is_temporal_literal(rhs): lhs = to_interval(lhs) rhs = to_interval(rhs) return node.op.value == relate_intervals(lhs, rhs) else: return type(node)(lhs, rhs) @handle(ast.ArrayPredicate, subclasses=True) def array(self, node, lhs, rhs): if isinstance(lhs, list) and isinstance(rhs, list): left = set(lhs) right = set(rhs) if node.op == 
ast.ArrayComparisonOp.AEQUALS: return left == right elif node.op == ast.ArrayComparisonOp.ACONTAINS: return left >= right elif node.op == ast.ArrayComparisonOp.ACONTAINEDBY: return left <= right elif node.op == ast.ArrayComparisonOp.AOVERLAPS: return bool(left & right) else: return type(node)(lhs, rhs) @handle(ast.SpatialComparisonPredicate, subclasses=True) def spatial_operation(self, node, lhs, rhs): if is_geometry_literal(lhs) and is_geometry_literal(rhs): lhs = to_geometry(lhs) rhs = to_geometry(rhs) op = getattr(lhs, node.op.value.lower()) return op(rhs) else: return type(node)(lhs, rhs) @handle(ast.Relate) def spatial_pattern(self, node, lhs, rhs): if is_geometry_literal(lhs) and isinstance(rhs, str): lhs = to_geometry(lhs) return lhs.relate_pattern(rhs, node.pattern) else: return ast.Relate(lhs, rhs, node.pattern) @handle(ast.SpatialDistancePredicate) def distance(self, node, lhs, rhs): # TODO: can this be reduced? return type(node)(lhs, rhs, node.distance, node.units) @handle(ast.BBox) def bbox(self, node, lhs): if is_geometry_literal(lhs): lhs = to_geometry(lhs) return lhs.intersects( shapely.geometry.Polygon.from_bounds( node.minx, node.miny, node.maxx, node.maxy ) ) else: return ast.BBox(lhs, node.minx, node.miny, node.maxx, node.maxy) @handle(ast.Attribute) def attribute(self, node): return node @handle(ast.Arithmetic, subclasses=True) def arithmetic(self, node, lhs, rhs): if is_literal(lhs) and is_literal(rhs): op = ARITHMETIC_MAP[node.op.value] return op(lhs, rhs) else: return type(node)(lhs, rhs) @handle(ast.Function) def function(self, node, *arguments): func = self.function_map.get(node.name) if func and all(is_any_literal(a) for a in arguments): return func(*arguments) else: return ast.Function(node.name, list(arguments)) # just pass through these nodes @handle(ast.Attribute, values.Geometry, values.Envelope, *values.LITERALS) def literal(self, node): return node def to_interval(value): # TODO: zulu = None if isinstance(value, values.Interval): 
low = value.start high = value.end if isinstance(low, date): low = datetime.combine(low, time.min, zulu) if isinstance(high, date): high = datetime.combine(high, time.max, zulu) if isinstance(low, timedelta): low = high - timedelta elif isinstance(high, timedelta): high = low + timedelta return (low, high) elif isinstance(value, date): return ( datetime.combine(value, time.min, zulu), datetime.combine(value, time.max, zulu), ) elif isinstance(value, datetime): return (value, value) raise ValueError(f"Invalid type {type(value)}") def relate_intervals(lhs, rhs): # noqa: C901 ll, lh = lhs rl, rh = rhs if lh < rl: return "BEFORE" elif ll > rh: return "AFTER" elif lh == rl: return "MEETS" elif ll == rh: return "METBY" elif ll < rl and rl < lh < rh: return "TOVERLAPS" elif rl < ll < rh and lh > rh: return "OVERLAPPEDBY" elif ll == rl and lh < rh: return "BEGINS" elif ll == rl and lh > rh: return "BEGUNBY" elif ll > rl and lh < rh: return "DURING" elif ll < rl and lh > rh: return "TCONTAINS" elif ll > rl and lh == rh: return "TENDS" elif ll < rl and lh == rh: return "ENDEDBY" elif ll == rl and lh == rh: return "TEQUALS" raise ValueError(f"Error relating intervals [{ll}, {lh}] and ({rl}, {rh})") def optimize( root: ast.Node, function_map: Optional[Dict[str, Callable]] = None ) -> ast.Node: result = OptimizeEvaluator(function_map or {}).evaluate(root) if isinstance(result, bool): result = ast.Include(not result) return result pygeofilter-0.3.1/pygeofilter/backends/oraclesql/000077500000000000000000000000001473475122500221465ustar00rootroot00000000000000pygeofilter-0.3.1/pygeofilter/backends/oraclesql/__init__.py000066400000000000000000000032071473475122500242610ustar00rootroot00000000000000# ------------------------------------------------------------------------------ # # Project: pygeofilter # Authors: Andreas Kosubek # Bernhard Mallinger # ------------------------------------------------------------------------------ # Copyright (C) 2024 EOX IT Services GmbH # # 
Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies of this Software or works derived from this Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. 
# ------------------------------------------------------------------------------ from .evaluate import to_sql_where, to_sql_where_with_bind_variables __all__ = ["to_sql_where", "to_sql_where_with_bind_variables"] pygeofilter-0.3.1/pygeofilter/backends/oraclesql/evaluate.py000066400000000000000000000215031473475122500243270ustar00rootroot00000000000000# ------------------------------------------------------------------------------ # # Project: pygeofilter # Authors: Andreas Kosubek # Bernhard Mallinger # # ------------------------------------------------------------------------------ # Copyright (C) 2023 Agrar Markt Austria # Copyright (C) 2024 EOX IT Services GmbH # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies of this Software or works derived from this Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. # ------------------------------------------------------------------------------ import json from typing import Any, Dict, Optional, Tuple from ... 
import ast, values from ..evaluator import Evaluator, handle COMPARISON_OP_MAP = { ast.ComparisonOp.EQ: "=", ast.ComparisonOp.NE: "<>", ast.ComparisonOp.LT: "<", ast.ComparisonOp.LE: "<=", ast.ComparisonOp.GT: ">", ast.ComparisonOp.GE: ">=", } ARITHMETIC_OP_MAP = { ast.ArithmeticOp.ADD: "+", ast.ArithmeticOp.SUB: "-", ast.ArithmeticOp.MUL: "*", ast.ArithmeticOp.DIV: "/", } SPATIAL_COMPARISON_OP_MAP = { ast.SpatialComparisonOp.INTERSECTS: "ANYINTERACT", ast.SpatialComparisonOp.DISJOINT: "DISJOINT", ast.SpatialComparisonOp.CONTAINS: "CONTAINS", ast.SpatialComparisonOp.WITHIN: "INSIDE", ast.SpatialComparisonOp.TOUCHES: "TOUCH", ast.SpatialComparisonOp.CROSSES: "OVERLAPBDYDISJOINT", ast.SpatialComparisonOp.OVERLAPS: "OVERLAPBDYINTERSECT", ast.SpatialComparisonOp.EQUALS: "EQUAL", } class OracleSQLEvaluator(Evaluator): bind_variables: Dict[str, Any] def __init__(self, attribute_map: Dict[str, str], function_map: Dict[str, str]): self.attribute_map = attribute_map self.function_map = function_map self.with_bind_variables = False self.bind_variables = {} # Counter for bind variables self.b_cnt = 0 @handle(ast.Not) def not_(self, node, sub): return f"NOT {sub}" @handle(ast.And, ast.Or) def combination(self, node, lhs, rhs): return f"({lhs} {node.op.value} {rhs})" @handle(ast.Comparison, subclasses=True) def comparison(self, node, lhs, rhs): if self.with_bind_variables: self.bind_variables[f"{lhs}_{self.b_cnt}"] = rhs sql = f"({lhs} {COMPARISON_OP_MAP[node.op]} :{lhs}_{self.b_cnt})" self.b_cnt += 1 else: sql = f"({lhs} {COMPARISON_OP_MAP[node.op]} {rhs})" return sql @handle(ast.Between) def between(self, node, lhs, low, high): if self.with_bind_variables: self.bind_variables[f"{lhs}_high_{self.b_cnt}"] = high self.bind_variables[f"{lhs}_low_{self.b_cnt}"] = low sql = ( f"({lhs} {'NOT ' if node.not_ else ''}BETWEEN " f":{lhs}_low_{self.b_cnt} AND :{lhs}_high_{self.b_cnt})" ) self.b_cnt += 1 else: sql = f"({lhs} {'NOT ' if node.not_ else ''}BETWEEN " f"{low} AND {high})" 
return sql @handle(ast.Like) def like(self, node, lhs): pattern = node.pattern if node.wildcard != "%": pattern = pattern.replace(node.wildcard, "%") if node.singlechar != "_": pattern = pattern.replace(node.singlechar, "_") if self.with_bind_variables: self.bind_variables[f"{lhs}_{self.b_cnt}"] = pattern sql = f"{lhs} {'NOT ' if node.not_ else ''}LIKE " sql += f":{lhs}_{self.b_cnt} ESCAPE '{node.escapechar}'" else: sql = f"{lhs} {'NOT ' if node.not_ else ''}LIKE " sql += f"'{pattern}' ESCAPE '{node.escapechar}'" return sql @handle(ast.In) def in_(self, node, lhs, *options): return f"{lhs} {'NOT ' if node.not_ else ''}IN ({', '.join(options)})" @handle(ast.IsNull) def null(self, node, lhs): return f"{lhs} IS {'NOT ' if node.not_ else ''}NULL" @handle(ast.SpatialComparisonPredicate, subclasses=True) def spatial_operation(self, node, lhs, rhs): param = f"mask={SPATIAL_COMPARISON_OP_MAP[node.op]}" func = f"SDO_RELATE({lhs}, {rhs}, '{param}') = 'TRUE'" return func @handle(ast.BBox) def bbox(self, node, lhs): geo_json = json.dumps( { "type": "Polygon", "coordinates": [ [ [node.minx, node.miny], [node.minx, node.maxy], [node.maxx, node.maxy], [node.maxx, node.miny], [node.minx, node.miny], ] ], } ) srid = 4326 param = "mask=ANYINTERACT" if self.with_bind_variables: self.bind_variables[f"geo_json_{self.b_cnt}"] = geo_json self.bind_variables[f"srid_{self.b_cnt}"] = srid geom_sql = ( f"SDO_UTIL.FROM_JSON(geometry => :geo_json_{self.b_cnt}, " f"srid => :srid_{self.b_cnt})" ) self.b_cnt += 1 else: geom_sql = ( f"SDO_UTIL.FROM_JSON(geometry => '{geo_json}', " f"srid => {srid})" ) sql = f"SDO_RELATE({lhs}, {geom_sql}, '{param}') = 'TRUE'" return sql @handle(ast.Attribute) def attribute(self, node: ast.Attribute): return f"{self.attribute_map[node.name]}" @handle(ast.Arithmetic, subclasses=True) def arithmetic(self, node: ast.Arithmetic, lhs, rhs): op = ARITHMETIC_OP_MAP[node.op] return f"({lhs} {op} {rhs})" @handle(ast.Function) def function(self, node, *arguments): func = 
self.function_map[node.name] return f"{func}({','.join(arguments)})" @handle(*values.LITERALS) def literal(self, node): if isinstance(node, str): return f"'{node}'" else: return node @handle(values.Geometry) def geometry(self, node: values.Geometry): # TODO Read CRS information from # node and translate to SRID srid = 4326 geo_json = json.dumps(node.geometry) print(geo_json) if self.with_bind_variables: self.bind_variables[f"geo_json_{self.b_cnt}"] = geo_json self.bind_variables[f"srid_{self.b_cnt}"] = srid sql = ( f"SDO_UTIL.FROM_JSON(geometry => :geo_json_{self.b_cnt}, " f"srid => :srid_{self.b_cnt})" ) self.b_cnt += 1 else: sql = f"SDO_UTIL.FROM_JSON(geometry => '{geo_json}', " f"srid => {srid})" return sql @handle(values.Envelope) def envelope(self, node: values.Envelope): # TODO Read CRS information from # node and translate to SRID srid = 4326 geo_json = json.dumps(node.geometry) if self.with_bind_variables: self.bind_variables[f"geo_json_{self.b_cnt}"] = geo_json self.bind_variables[f"srid_{self.b_cnt}"] = srid sql = ( f"SDO_UTIL.FROM_JSON(geometry => :geo_json_{self.b_cnt}, " f"srid => :srid_{self.b_cnt})" ) self.b_cnt += 1 else: sql = f"SDO_UTIL.FROM_JSON(geometry => '{geo_json}', " f"srid => {srid})" return sql def to_sql_where( root: ast.Node, field_mapping: Dict[str, str], function_map: Optional[Dict[str, str]] = None, ) -> str: orcle = OracleSQLEvaluator(field_mapping, function_map or {}) orcle.with_bind_variables = False return orcle.evaluate(root) def to_sql_where_with_bind_variables( root: ast.Node, field_mapping: Dict[str, str], function_map: Optional[Dict[str, str]] = None, ) -> Tuple[str, Dict[str, Any]]: orcle = OracleSQLEvaluator(field_mapping, function_map or {}) orcle.with_bind_variables = True orcle.bind_variables = {} return orcle.evaluate(root), orcle.bind_variables 
pygeofilter-0.3.1/pygeofilter/backends/sql/000077500000000000000000000000001473475122500207605ustar00rootroot00000000000000pygeofilter-0.3.1/pygeofilter/backends/sql/__init__.py000066400000000000000000000000771473475122500230750ustar00rootroot00000000000000from .evaluate import to_sql_where __all__ = ["to_sql_where"] pygeofilter-0.3.1/pygeofilter/backends/sql/evaluate.py000066400000000000000000000133031473475122500231400ustar00rootroot00000000000000# ------------------------------------------------------------------------------ # # Project: pygeofilter # Authors: Fabian Schindler # # ------------------------------------------------------------------------------ # Copyright (C) 2021 EOX IT Services GmbH # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies of this Software or works derived from this Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. # ------------------------------------------------------------------------------ from typing import Dict, Optional import shapely.geometry from ... 
import ast, values from ..evaluator import Evaluator, handle COMPARISON_OP_MAP = { ast.ComparisonOp.EQ: "=", ast.ComparisonOp.NE: "<>", ast.ComparisonOp.LT: "<", ast.ComparisonOp.LE: "<=", ast.ComparisonOp.GT: ">", ast.ComparisonOp.GE: ">=", } ARITHMETIC_OP_MAP = { ast.ArithmeticOp.ADD: "+", ast.ArithmeticOp.SUB: "-", ast.ArithmeticOp.MUL: "*", ast.ArithmeticOp.DIV: "/", } SPATIAL_COMPARISON_OP_MAP = { ast.SpatialComparisonOp.INTERSECTS: "ST_Intersects", ast.SpatialComparisonOp.DISJOINT: "ST_Disjoint", ast.SpatialComparisonOp.CONTAINS: "ST_Contains", ast.SpatialComparisonOp.WITHIN: "ST_Within", ast.SpatialComparisonOp.TOUCHES: "ST_Touches", ast.SpatialComparisonOp.CROSSES: "ST_Crosses", ast.SpatialComparisonOp.OVERLAPS: "ST_Overlaps", ast.SpatialComparisonOp.EQUALS: "ST_Equals", } class SQLEvaluator(Evaluator): def __init__(self, attribute_map: Dict[str, str], function_map: Dict[str, str]): self.attribute_map = attribute_map self.function_map = function_map @handle(ast.Not) def not_(self, node, sub): return f"NOT {sub}" @handle(ast.And, ast.Or) def combination(self, node, lhs, rhs): return f"({lhs} {node.op.value} {rhs})" @handle(ast.Comparison, subclasses=True) def comparison(self, node, lhs, rhs): return f"({lhs} {COMPARISON_OP_MAP[node.op]} {rhs})" @handle(ast.Between) def between(self, node, lhs, low, high): return f"({lhs} {'NOT ' if node.not_ else ''}BETWEEN {low} AND {high})" @handle(ast.Like) def like(self, node, lhs): pattern = node.pattern if node.wildcard != "%": # TODO: not preceded by escapechar pattern = pattern.replace(node.wildcard, "%") if node.singlechar != "_": # TODO: not preceded by escapechar pattern = pattern.replace(node.singlechar, "_") # TODO: handle node.nocase return ( f"{lhs} {'NOT ' if node.not_ else ''}LIKE " f"'{pattern}' ESCAPE '{node.escapechar}'" ) @handle(ast.In) def in_(self, node, lhs, *options): return f"{lhs} {'NOT ' if node.not_ else ''}IN ({', '.join(options)})" @handle(ast.IsNull) def null(self, node, lhs): return f"{lhs} 
IS {'NOT ' if node.not_ else ''}NULL" # @handle(ast.TemporalPredicate, subclasses=True) # def temporal(self, node, lhs, rhs): # pass @handle(ast.SpatialComparisonPredicate, subclasses=True) def spatial_operation(self, node, lhs, rhs): func = SPATIAL_COMPARISON_OP_MAP[node.op] return f"{func}({lhs},{rhs})" @handle(ast.BBox) def bbox(self, node, lhs): func = SPATIAL_COMPARISON_OP_MAP[ast.SpatialComparisonOp.INTERSECTS] rhs = f"ST_GeomFromText('POLYGON(({node.minx} {node.miny}, {node.minx} {node.maxy}, {node.maxx} {node.maxy}, {node.maxx} {node.miny}, {node.minx} {node.miny}))')" # noqa return f"{func}({lhs},{rhs})" @handle(ast.Attribute) def attribute(self, node: ast.Attribute): return f'"{self.attribute_map[node.name]}"' @handle(ast.Arithmetic, subclasses=True) def arithmetic(self, node: ast.Arithmetic, lhs, rhs): op = ARITHMETIC_OP_MAP[node.op] return f"({lhs} {op} {rhs})" @handle(ast.Function) def function(self, node, *arguments): func = self.function_map[node.name] return f"{func}({','.join(arguments)})" @handle(*values.LITERALS) def literal(self, node): if isinstance(node, str): return f"'{node}'" else: # TODO: return str(node) @handle(values.Geometry) def geometry(self, node: values.Geometry): wkb_hex = shapely.geometry.shape(node).wkb_hex return f"ST_GeomFromWKB(x'{wkb_hex}')" @handle(values.Envelope) def envelope(self, node: values.Envelope): wkb_hex = shapely.geometry.box(node.x1, node.y1, node.x2, node.y2).wkb_hex return f"ST_GeomFromWKB(x'{wkb_hex}')" def to_sql_where( root: ast.Node, field_mapping: Dict[str, str], function_map: Optional[Dict[str, str]] = None, ) -> str: return SQLEvaluator(field_mapping, function_map or {}).evaluate(root) pygeofilter-0.3.1/pygeofilter/backends/sqlalchemy/000077500000000000000000000000001473475122500223235ustar00rootroot00000000000000pygeofilter-0.3.1/pygeofilter/backends/sqlalchemy/README.md000066400000000000000000000051521473475122500236050ustar00rootroot00000000000000## SQLAlchemy Integration The SQLAlchemy Integration 
translates the AST into a set of filters suitable for input into a filter of a SQLAlchemy Query. Given the following example model: ```python from sqlalchemy import Column, Integer, String, Float, DateTime, ForeignKey from geoalchemy2 import Geometry Base = declarative_base() class Record(Base): __tablename__ = "record" identifier = Column(String, primary_key=True) geometry = Column( Geometry( geometry_type="MULTIPOLYGON", srid=4326, spatial_index=False, management=True, ) ) float_attribute = Column(Float) int_attribute = Column(Integer) str_attribute = Column(String) datetime_attribute = Column(DateTime) choice_attribute = Column(Integer) class RecordMeta(Base): __tablename__ = "record_meta" identifier = Column(Integer, primary_key=True) record = Column(String, ForeignKey("record.identifier")) float_meta_attribute = Column(Float) int_meta_attribute = Column(Integer) str_meta_attribute = Column(String) datetime_meta_attribute = Column(DateTime) choice_meta_attribute = Column(Integer) ``` Now we can specify the field mappings to be used when applying the filters: ```python FIELD_MAPPING = { "identifier": Record.identifier, "geometry": Record.geometry, "floatAttribute": Record.float_attribute, "intAttribute": Record.int_attribute, "strAttribute": Record.str_attribute, "datetimeAttribute": Record.datetime_attribute, "choiceAttribute": Record.choice_attribute, # meta fields "floatMetaAttribute": RecordMeta.float_meta_attribute, "intMetaAttribute": RecordMeta.int_meta_attribute, "strMetaAttribute": RecordMeta.str_meta_attribute, "datetimeMetaAttribute": RecordMeta.datetime_meta_attribute, "choiceMetaAttribute": RecordMeta.choice_meta_attribute, } ``` Finally we are able to connect the CQL AST to the SQLAlchemy database models. 
We also provide factory functions to parse the timestamps, durations, geometries and envelopes, so that they can be used with the ORM layer: ```python from pygeofilter.integrations.sqlalchemy import to_filter, parse cql_expr = 'strMetaAttribute LIKE "%parent%" AND datetimeAttribute BEFORE 2000-01-01T00:00:01Z' # NOTE: we are using the sqlalchemy integration `parse` wrapper here ast = parse(cql_expr) print(ast) filters = to_filter(ast, FIELD_MAPPING) q = session.query(Record).join(RecordMeta).filter(filters) ``` ## Tests Tests for the sqlalchemy integration can be run as following: ```shell python -m unittest discover tests/sqlalchemy_test/ tests.py ``` pygeofilter-0.3.1/pygeofilter/backends/sqlalchemy/__init__.py000066400000000000000000000000711473475122500244320ustar00rootroot00000000000000from .evaluate import to_filter __all__ = ["to_filter"] pygeofilter-0.3.1/pygeofilter/backends/sqlalchemy/evaluate.py000066400000000000000000000102771473475122500245120ustar00rootroot00000000000000from datetime import date, datetime, time, timedelta from ... import ast, values from ..evaluator import Evaluator, handle from . 
import filters LITERALS = (str, float, int, bool, datetime, date, time, timedelta) class SQLAlchemyFilterEvaluator(Evaluator): def __init__(self, field_mapping, undefined_as_null): self.field_mapping = field_mapping self.undefined_as_null = undefined_as_null @handle(ast.Not) def not_(self, node, sub): return filters.negate(sub) @handle(ast.And, ast.Or) def combination(self, node, lhs, rhs): return filters.combine((lhs, rhs), node.op.value) @handle(ast.Comparison, subclasses=True) def comparison(self, node, lhs, rhs): return filters.runop( lhs, rhs, node.op.value, ) @handle(ast.Between) def between(self, node, lhs, low, high): return filters.between(lhs, low, high, node.not_) @handle(ast.Like) def like(self, node, lhs): return filters.like( lhs, node.pattern, not node.nocase, node.not_, ) @handle(ast.In) def in_(self, node, lhs, *options): return filters.runop( lhs, options, "in", node.not_, ) @handle(ast.IsNull) def null(self, node, lhs): return filters.runop(lhs, None, "is_null", node.not_) # @handle(ast.ExistsPredicateNode) # def exists(self, node, lhs): # if self.use_getattr: # result = hasattr(self.obj, node.lhs.name) # else: # result = lhs in self.obj # if node.not_: # result = not result # return result @handle(ast.TemporalPredicate, subclasses=True) def temporal(self, node, lhs, rhs): return filters.temporal( lhs, rhs, node.op.value, ) @handle(ast.SpatialComparisonPredicate, subclasses=True) def spatial_operation(self, node, lhs, rhs): return filters.spatial( lhs, rhs, node.op.name, ) @handle(ast.Relate) def spatial_pattern(self, node, lhs, rhs): return filters.spatial( lhs, rhs, "RELATE", pattern=node.pattern, ) @handle(ast.SpatialDistancePredicate, subclasses=True) def spatial_distance(self, node, lhs, rhs): return filters.spatial( lhs, rhs, node.op.value, distance=node.distance, units=node.units, ) @handle(ast.BBox) def bbox(self, node, lhs): return filters.bbox(lhs, node.minx, node.miny, node.maxx, node.maxy, node.crs) @handle(ast.Attribute) def 
attribute(self, node): return filters.attribute(node.name, self.field_mapping, self.undefined_as_null) @handle(ast.Arithmetic, subclasses=True) def arithmetic(self, node, lhs, rhs): return filters.runop(lhs, rhs, node.op.value) # TODO: map functions # @handle(ast.FunctionExpressionNode) # def function(self, node, *arguments): # return self.function_map[node.name](*arguments) @handle(*values.LITERALS) def literal(self, node): return filters.literal(node) @handle(values.Interval) def interval(self, node, start, end): return filters.literal((start, end)) @handle(values.Geometry) def geometry(self, node): return filters.parse_geometry(node.__geo_interface__) @handle(values.Envelope) def envelope(self, node): return filters.parse_bbox([node.x1, node.y1, node.x2, node.y2]) def to_filter(ast, field_mapping={}, undefined_as_null=None): """Helper function to translate ECQL AST to SQLAlchemy Query expressions. :param ast: the abstract syntax tree :param field_mapping: a dict mapping from the filter name to the SQLAlchemy field lookup. :param undefined_as_null: whether a name not present in field_mapping should evaluate to null. 
:type ast: :class:`Node` :returns: a SQLAlchemy query object """ return SQLAlchemyFilterEvaluator(field_mapping, undefined_as_null).evaluate(ast) pygeofilter-0.3.1/pygeofilter/backends/sqlalchemy/filters.py000066400000000000000000000222561473475122500243540ustar00rootroot00000000000000from datetime import timedelta from functools import reduce from inspect import signature from typing import Callable, Dict, Optional from pygeoif import shape from sqlalchemy import and_, func, not_, or_, null def parse_bbox(box, srid: Optional[int] = None): minx, miny, maxx, maxy = box return func.ST_GeomFromEWKT( f"SRID={4326 if srid is None else srid};POLYGON((" f"{minx} {miny}, {minx} {maxy}, " f"{maxx} {maxy}, {maxx} {miny}, " f"{minx} {miny}))" ) def parse_geometry(geom: dict): crs_identifier = ( geom.get("crs", {}) .get("properties", {}) .get("name", "urn:ogc:def:crs:EPSG::4326") ) srid = crs_identifier.rpartition("::")[-1] wkt = shape(geom).wkt return func.ST_GeomFromEWKT(f"SRID={srid};{wkt}") # ------------------------------------------------------------------------------ # Filters # ------------------------------------------------------------------------------ class Operator: OPERATORS: Dict[str, Callable] = { "is_null": lambda f, a=None: f.is_(None), "is_not_null": lambda f, a=None: f.isnot(None), "==": lambda f, a: f == a, "=": lambda f, a: f == a, "eq": lambda f, a: f == a, "!=": lambda f, a: f != a, "<>": lambda f, a: f != a, "ne": lambda f, a: f != a, ">": lambda f, a: f > a, "gt": lambda f, a: f > a, "<": lambda f, a: f < a, "lt": lambda f, a: f < a, ">=": lambda f, a: f >= a, "ge": lambda f, a: f >= a, "<=": lambda f, a: f <= a, "le": lambda f, a: f <= a, "like": lambda f, a: f.like(a), "ilike": lambda f, a: f.ilike(a), "not_ilike": lambda f, a: ~f.ilike(a), "in": lambda f, a: f.in_(a), "not_in": lambda f, a: ~f.in_(a), "any": lambda f, a: f.any(a), "not_any": lambda f, a: func.not_(f.any(a)), "INTERSECTS": lambda f, a: f.ST_Intersects(a), "DISJOINT": lambda f, a: 
f.ST_Disjoint(a), "CONTAINS": lambda f, a: f.ST_Contains(a), "WITHIN": lambda f, a: f.ST_Within(a), "TOUCHES": lambda f, a: f.ST_Touches(a), "CROSSES": lambda f, a: f.ST_Crosses(a), "OVERLAPS": lambda f, a: f.ST_Overlaps(a), "EQUALS": lambda f, a: f.ST_Equals(a), "RELATE": lambda f, a, pattern: f.ST_Relate(a, pattern), "DWITHIN": lambda f, a, distance: f.ST_Dwithin(a, distance), "BEYOND": lambda f, a, distance: ~f.ST_Dwithin(a, distance), "+": lambda f, a: f + a, "-": lambda f, a: f - a, "*": lambda f, a: f * a, "/": lambda f, a: f / a, } def __init__(self, operator: Optional[str] = None): if not operator: operator = "==" if operator not in self.OPERATORS: raise Exception("Operator `{}` not valid.".format(operator)) self.operator = operator self.function = self.OPERATORS[operator] self.arity = len(signature(self.function).parameters) def combine(sub_filters, combinator: str = "AND"): """Combine filters using a logical combinator :param sub_filters: the filters to combine :param combinator: a string: "AND" / "OR" :return: the combined filter """ assert combinator in ("AND", "OR") _op = and_ if combinator == "AND" else or_ def test(acc, q): return _op(acc, q) return reduce(test, sub_filters) def negate(sub_filter): """Negate a filter, opposing its meaning. :param sub_filter: the filter to negate :return: the negated filter """ return not_(sub_filter) def runop(lhs, rhs=None, op: str = "=", negate: bool = False): """Compare a filter with an expression using a comparison operation :param lhs: the field to compare :param rhs: the filter expression :param op: a string denoting the operation. :return: a comparison expression object """ _op = Operator(op) if negate: return not_(_op.function(lhs, rhs)) return _op.function(lhs, rhs) def between(lhs, low, high, negate=False): """Create a filter to match elements that have a value within a certain range. 
:param lhs: the field to compare :param low: the lower value of the range :param high: the upper value of the range :param not_: whether the range shall be inclusive (the default) or exclusive :return: a comparison expression object """ l_op = Operator("<=") g_op = Operator(">=") if negate: return not_(and_(g_op.function(lhs, low), l_op.function(lhs, high))) return and_(g_op.function(lhs, low), l_op.function(lhs, high)) def like(lhs, rhs, case=False, negate=False): """Create a filter to filter elements according to a string attribute using wildcard expressions. :param lhs: the field to compare :param rhs: the wildcard pattern: a string containing any number of '%' characters as wildcards. :param case: whether the lookup shall be done case sensitively or not :param not_: whether the range shall be inclusive (the default) or exclusive :return: a comparison expression object """ if case: _op = Operator("like") else: _op = Operator("ilike") if negate: return not_(_op.function(lhs, rhs)) return _op.function(lhs, rhs) def temporal(lhs, time_or_period, op): """Create a temporal filter for the given temporal attribute. :param lhs: the field to compare :type lhs: :class:`django.db.models.F` :param time_or_period: the time instant or time span to use as a filter :type time_or_period: :class:`datetime.datetime` or a tuple of two datetimes or a tuple of one datetime and one :class:`datetime.timedelta` :param op: the comparison operation. one of ``"BEFORE"``, ``"BEFORE OR DURING"``, ``"DURING"``, ``"DURING OR AFTER"``, ``"AFTER"``. 
:type op: str :return: a comparison expression object :rtype: :class:`django.db.models.Q` """ low = None high = None equal = None if op in ("BEFORE", "AFTER"): if op == "BEFORE": high = time_or_period else: low = time_or_period elif op == "TEQUALS": equal = time_or_period else: low, high = time_or_period if isinstance(low, timedelta): low = high - low if isinstance(high, timedelta): high = low + high if low is not None or high is not None: if low is not None and high is not None: return between(lhs, low, high) elif low is not None: return runop(lhs, low, ">=") else: return runop(lhs, high, "<=") elif equal is not None: return runop(lhs, equal, "==") UNITS_LOOKUP = {"kilometers": "km", "meters": "m"} def spatial(lhs, rhs, op, pattern=None, distance=None, units=None): """Create a spatial filter for the given spatial attribute. :param lhs: the field to compare :param rhs: the time instant or time span to use as a filter :param op: the comparison operation. one of ``"INTERSECTS"``, ``"DISJOINT"``, `"CONTAINS"``, ``"WITHIN"``, ``"TOUCHES"``, ``"CROSSES"``, ``"OVERLAPS"``, ``"EQUALS"``, ``"RELATE"``, ``"DWITHIN"``, ``"BEYOND"`` :param pattern: the spatial relation pattern :param distance: the distance value for distance based lookups: ``"DWITHIN"`` and ``"BEYOND"`` :param units: the units the distance is expressed in :return: a comparison expression object """ _op = Operator(op) if op == "RELATE": return _op.function(lhs, rhs, pattern) elif op in ("DWITHIN", "BEYOND"): if units == "kilometers": distance = distance / 1000 elif units == "miles": distance = distance / 1609 return _op.function(lhs, rhs, distance) else: return _op.function(lhs, rhs) def bbox(lhs, minx, miny, maxx, maxy, crs=4326): """Create a bounding box filter for the given spatial attribute. 
:param lhs: the field to compare :param minx: the lower x part of the bbox :param miny: the lower y part of the bbox :param maxx: the upper x part of the bbox :param maxy: the upper y part of the bbox :param crs: the CRS the bbox is expressed in :return: a comparison expression object """ return lhs.ST_Intersects(parse_bbox([minx, miny, maxx, maxy], crs)) def attribute(name, field_mapping={}, undefined_as_null: bool = None): """Create an attribute lookup expression using a field mapping dictionary. :param name: the field filter name :param field_mapping: the dictionary to use as a lookup. :param undefined_as_null: how to handle a name not present in field_mapping (None (default) - leave as-is; True - treat as null; False - throw error) """ if undefined_as_null is None: return field_mapping.get(name, name) if undefined_as_null: # return null object if name is not found in field_mapping return field_mapping.get(name, null()) # undefined_as_null is False, so raise KeyError if name not found return field_mapping[name] def literal(value): return value pygeofilter-0.3.1/pygeofilter/cql2.py000066400000000000000000000050751473475122500176310ustar00rootroot00000000000000# Common configurations for cql2 parsers and evaluators. from typing import Dict, Type, Union from . 
import ast # https://github.com/opengeospatial/ogcapi-features/tree/master/cql2 COMPARISON_MAP: Dict[str, Type[ast.Node]] = { "=": ast.Equal, "eq": ast.Equal, "<>": ast.NotEqual, "!=": ast.NotEqual, "ne": ast.NotEqual, "<": ast.LessThan, "lt": ast.LessThan, "<=": ast.LessEqual, "lte": ast.LessEqual, ">": ast.GreaterThan, "gt": ast.GreaterThan, ">=": ast.GreaterEqual, "gte": ast.GreaterEqual, "like": ast.Like, } SPATIAL_PREDICATES_MAP: Dict[str, Type[ast.SpatialComparisonPredicate]] = { "s_intersects": ast.GeometryIntersects, "s_equals": ast.GeometryEquals, "s_disjoint": ast.GeometryDisjoint, "s_touches": ast.GeometryTouches, "s_within": ast.GeometryWithin, "s_overlaps": ast.GeometryOverlaps, "s_crosses": ast.GeometryCrosses, "s_contains": ast.GeometryContains, } TEMPORAL_PREDICATES_MAP: Dict[str, Type[ast.TemporalPredicate]] = { "t_before": ast.TimeBefore, "t_after": ast.TimeAfter, "t_meets": ast.TimeMeets, "t_metby": ast.TimeMetBy, "t_overlaps": ast.TimeOverlaps, "t_overlappedby": ast.TimeOverlappedBy, "t_begins": ast.TimeBegins, "t_begunby": ast.TimeBegunBy, "t_during": ast.TimeDuring, "t_contains": ast.TimeContains, "t_ends": ast.TimeEnds, "t_endedby": ast.TimeEndedBy, "t_equals": ast.TimeEquals, "t_intersects": ast.TimeOverlaps, } ARRAY_PREDICATES_MAP: Dict[str, Type[ast.ArrayPredicate]] = { "a_equals": ast.ArrayEquals, "a_contains": ast.ArrayContains, "a_containedby": ast.ArrayContainedBy, "a_overlaps": ast.ArrayOverlaps, } ARITHMETIC_MAP: Dict[str, Type[ast.Arithmetic]] = { "+": ast.Add, "-": ast.Sub, "*": ast.Mul, "/": ast.Div, } CONDITION_MAP: Dict[str, Type[ast.Node]] = { "and": ast.And, "or": ast.Or, "not": ast.Not, "isNull": ast.IsNull, } BINARY_OP_PREDICATES_MAP: Dict[ str, Union[ Type[ast.Node], Type[ast.Comparison], Type[ast.SpatialComparisonPredicate], Type[ast.TemporalPredicate], Type[ast.ArrayPredicate], Type[ast.Arithmetic], ], ] = { **COMPARISON_MAP, **SPATIAL_PREDICATES_MAP, **TEMPORAL_PREDICATES_MAP, **ARRAY_PREDICATES_MAP, **ARITHMETIC_MAP, 
**CONDITION_MAP, } def get_op(node: ast.Node) -> Union[str, None]: # Get the cql2 operator string from a node. for k, v in BINARY_OP_PREDICATES_MAP.items(): if isinstance(node, v): return k return None pygeofilter-0.3.1/pygeofilter/examples/000077500000000000000000000000001473475122500202255ustar00rootroot00000000000000pygeofilter-0.3.1/pygeofilter/examples/cql2.ipynb000066400000000000000000000140411473475122500221310ustar00rootroot00000000000000{ "cells": [ { "cell_type": "code", "execution_count": 1, "id": "fe8453fa", "metadata": {}, "outputs": [], "source": [ "from pygeofilter.parsers.cql2_json import parse\n", "from pygeofilter.backends.cql2_json import to_cql2\n", "import json\n", "import traceback\n", "from lark import lark, logger, v_args\n", "from pygeofilter.cql2 import BINARY_OP_PREDICATES_MAP\n" ] }, { "cell_type": "code", "execution_count": 2, "id": "b960603d", "metadata": {}, "outputs": [ { "data": { "text/plain": [ "And(lhs=And(lhs=And(lhs=Equal(lhs=ATTRIBUTE collection, rhs='landsat8_l1tp'), rhs=LessEqual(lhs=ATTRIBUTE gsd, rhs=30)), rhs=LessEqual(lhs=ATTRIBUTE eo:cloud_cover, rhs=10)), rhs=GreaterEqual(lhs=ATTRIBUTE datetime, rhs=datetime.datetime(2021, 4, 8, 4, 39, 23, tzinfo=)))" ] }, "execution_count": 2, "metadata": {}, "output_type": "execute_result" } ], "source": [ "from pygeofilter.parsers.cql2_text import parse as cql2_parse\n", "cql2_parse(\"collection = 'landsat8_l1tp' AND gsd <= 30 AND eo:cloud_cover <= 10 AND datetime >= TIMESTAMP('2021-04-08T04:39:23Z')\")" ] }, { "cell_type": "code", "execution_count": 3, "id": "c5f47281", "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Example 1\n", "*******parsed trees match***************\n", "*******reconstructed json matches*******\n", "____________________________________________________________\n", "Example 2\n", "*******parsed trees match***************\n", "*******reconstructed json matches*******\n", 
"____________________________________________________________\n", "Example 3\n", "*******parsed trees match***************\n", "*******reconstructed json matches*******\n", "____________________________________________________________\n", "Example 4\n", "*******parsed trees match***************\n", "*******reconstructed json matches*******\n", "____________________________________________________________\n", "Example 5\n", "*******parsed trees match***************\n", "*******reconstructed json matches*******\n", "____________________________________________________________\n", "Example 6\n", "*******parsed trees match***************\n", "*******reconstructed json matches*******\n", "____________________________________________________________\n", "Example 7\n", "*******parsed trees match***************\n", "*******reconstructed json matches*******\n", "____________________________________________________________\n", "Example 8\n", "*******parsed trees match***************\n", "*******reconstructed json matches*******\n", "____________________________________________________________\n", "Example 9\n", "*******parsed trees match***************\n", "*******reconstructed json matches*******\n", "____________________________________________________________\n", "Example 10\n", "*******parsed trees match***************\n", "*******reconstructed json matches*******\n", "____________________________________________________________\n", "Example 11\n", "*******parsed trees match***************\n", "*******reconstructed json matches*******\n", "____________________________________________________________\n", "Example 12\n", "*******parsed trees match***************\n", "*******reconstructed json matches*******\n", "____________________________________________________________\n" ] } ], "source": [ "from pygeofilter.parsers.cql2_text import parse as text_parse\n", "from pygeofilter.parsers.cql2_json import parse as json_parse\n", "from pygeofilter.backends.cql2_json import 
to_cql2\n", "import orjson\n", "import json\n", "import pprint\n", "def pp(j):\n", " print(orjson.dumps(j))\n", "with open('tests/parsers/cql2_json/fixtures.json') as f:\n", " examples = json.load(f)\n", "\n", "for k, v in examples.items():\n", " parsed_text = None\n", " parsed_json = None\n", " print (k)\n", " t=v['text'].replace('filter=','')\n", " j=v['json']\n", " # print('\\t' + t)\n", " # pp(orjson.loads(j))\n", " # print('*****')\n", " try:\n", " parsed_text=text_parse(t)\n", " parsed_json=json_parse(j)\n", " if parsed_text == parsed_json:\n", " print('*******parsed trees match***************')\n", " else:\n", " print(parsed_text)\n", " print('-----')\n", " print(parsed_json)\n", " if parsed_json is None or parsed_text is None:\n", " raise Exception\n", " if to_cql2(parsed_text) == to_cql2(parsed_json):\n", " print('*******reconstructed json matches*******')\n", " else:\n", " pp(to_cql2(parsed_text))\n", " print('-----')\n", " pp(to_cql2(parsed_json))\n", " except Exception as e:\n", " print(parsed_text)\n", " print(parsed_json)\n", " print(j)\n", " traceback.print_exc(f\"Error: {e}\")\n", " pass\n", " print('____________________________________________________________')\n", " " ] }, { "cell_type": "code", "execution_count": null, "id": "ac0bb004", "metadata": {}, "outputs": [], "source": [] } ], "metadata": { "kernelspec": { "display_name": "pygeofilter", "language": "python", "name": "pygeofilter" }, "language_info": { "codemirror_mode": { "name": "ipython", "version": 3 }, "file_extension": ".py", "mimetype": "text/x-python", "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", "version": "3.8.10" } }, "nbformat": 4, "nbformat_minor": 5 } 
pygeofilter-0.3.1/pygeofilter/parsers/000077500000000000000000000000001473475122500200665ustar00rootroot00000000000000pygeofilter-0.3.1/pygeofilter/parsers/__init__.py000066400000000000000000000000001473475122500221650ustar00rootroot00000000000000pygeofilter-0.3.1/pygeofilter/parsers/cql2_json/000077500000000000000000000000001473475122500217605ustar00rootroot00000000000000pygeofilter-0.3.1/pygeofilter/parsers/cql2_json/__init__.py000066400000000000000000000000571473475122500240730ustar00rootroot00000000000000from .parser import parse __all__ = ["parse"] pygeofilter-0.3.1/pygeofilter/parsers/cql2_json/parser.py000066400000000000000000000133371473475122500236350ustar00rootroot00000000000000# ------------------------------------------------------------------------------ # # Project: pygeofilter # Authors: Fabian Schindler , # David Bitner # # ------------------------------------------------------------------------------ # Copyright (C) 2021 EOX IT Services GmbH # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies of this Software or works derived from this Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. # ------------------------------------------------------------------------------ import json from datetime import date, datetime, timedelta from typing import List, Union, cast from ... import ast, values from ...cql2 import BINARY_OP_PREDICATES_MAP from ...util import parse_date, parse_datetime, parse_duration # https://github.com/opengeospatial/ogcapi-features/tree/master/cql2 JsonType = Union[dict, list, str, float, int, bool, None] def walk_cql_json(node: JsonType): # noqa: C901 if isinstance( node, ( str, float, int, bool, datetime, values.Geometry, values.Interval, ast.Node, ), ): return node if isinstance(node, list): return [walk_cql_json(sub_node) for sub_node in node] if not isinstance(node, dict): raise ValueError(f"Invalid type {type(node)}") if "filter-lang" in node and node["filter-lang"] != "cql2-json": raise Exception(f"Cannot parse {node['filter-lang']} with cql2-json.") elif "filter" in node: return walk_cql_json(node["filter"]) # check if we are dealing with a geometry if "type" in node and "coordinates" in node: # TODO: test if node is actually valid return values.Geometry(node) elif "bbox" in node: return values.Envelope(*node["bbox"]) elif "date" in node: return parse_date(node["date"]) elif "timestamp" in node: return parse_datetime(node["timestamp"]) elif "interval" in node: parsed: List[Union[date, datetime, timedelta, None]] = [] for value in node["interval"]: if value == "..": parsed.append(None) continue try: parsed.append(parse_date(value)) except ValueError: try: parsed.append(parse_duration(value)) except ValueError: parsed.append(parse_datetime(value)) return values.Interval(*parsed) elif "property" in node: return ast.Attribute(node["property"]) elif "function" in node: 
return ast.Function( node["function"]["name"], cast(List[ast.AstType], walk_cql_json(node["function"]["arguments"])), ) elif "lower" in node: return ast.Function("lower", [cast(ast.Node, walk_cql_json(node["lower"]))]) elif "op" in node: op = node["op"] args = walk_cql_json(node["args"]) if op in ("and", "or"): return (ast.And if op == "and" else ast.Or).from_items(*args) elif op == "not": # allow both arrays and objects, the standard is ambigous in # that regard if isinstance(args, list): args = args[0] return ast.Not(cast(ast.Node, walk_cql_json(args))) elif op == "isNull": # like with "not", allow both arrays and objects if isinstance(args, list): args = args[0] return ast.IsNull(cast(ast.Node, walk_cql_json(args)), not_=False) elif op == "between": return ast.Between( cast(ast.Node, walk_cql_json(args[0])), cast(ast.ScalarAstType, walk_cql_json(args[1][0])), cast(ast.ScalarAstType, walk_cql_json(args[1][1])), not_=False, ) elif op == "like": return ast.Like( cast(ast.Node, walk_cql_json(args[0])), pattern=cast(str, args[1]), nocase=False, wildcard="%", singlechar=".", escapechar="\\", not_=False, ) elif op == "in": return ast.In( cast(ast.AstType, walk_cql_json(args[0])), cast(List[ast.AstType], walk_cql_json(args[1])), not_=False, ) elif op in BINARY_OP_PREDICATES_MAP: args = [cast(ast.Node, walk_cql_json(arg)) for arg in args] return BINARY_OP_PREDICATES_MAP[op](*args) raise ValueError(f"Unable to parse expression node {node!r}") def parse(cql: Union[str, dict]) -> ast.AstType: if isinstance(cql, str): root = json.loads(cql) else: root = cql return walk_cql_json(root) pygeofilter-0.3.1/pygeofilter/parsers/cql2_text/000077500000000000000000000000001473475122500217735ustar00rootroot00000000000000pygeofilter-0.3.1/pygeofilter/parsers/cql2_text/__init__.py000066400000000000000000000000571473475122500241060ustar00rootroot00000000000000from .parser import parse __all__ = ["parse"] 
pygeofilter-0.3.1/pygeofilter/parsers/cql2_text/grammar.lark000066400000000000000000000150311473475122500242740ustar00rootroot00000000000000// ------------------------------------------------------------------------------ // // Project: pygeofilter // Authors: Fabian Schindler , David Bitner // // ------------------------------------------------------------------------------ // Copyright (C) 2021 EOX IT Services GmbH // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal // in the Software without restriction, including without limitation the rights // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell // copies of the Software, and to permit persons to whom the Software is // furnished to do so, subject to the following conditions: // // The above copyright notice and this permission notice shall be included in // all copies of this Software or works derived from this Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN // THE SOFTWARE. 
// ------------------------------------------------------------------------------ ?start: condition ?condition: condition_1 | condition "AND"i condition_1 -> and_ | condition "OR"i condition_1 -> or_ ?condition_1: predicate | "NOT"i predicate -> not_ | "(" condition ")" ?predicate: expression "=" expression -> eq | expression "eq"i expression -> eq | expression "<>" expression -> ne | expression "ne"i expression -> ne | expression "!=" expression -> ne | expression "<" expression -> lt | expression "lt"i expression -> lt | expression "<=" expression -> lte | expression "lte"i expression -> lte | expression ">" expression -> gt | expression "gt"i expression -> gt | expression ">=" expression -> gte | expression "gte"i expression -> gte | expression "BETWEEN"i expression "AND"i expression -> between | expression "LIKE"i SINGLE_QUOTED -> like | expression "IN"i "(" expression ( "," expression )* ")" -> in_ | expression "IS"i "NULL"i -> null | expression "IS"i "NOT"i "NULL"i -> not_null | "INCLUDE"i -> include | "EXCLUDE"i -> exclude | spatial_predicate | temporal_predicate ?temporal_predicate: expression _binary_temporal_predicate_func expression -> binary_temporal_predicate !_binary_temporal_predicate_func: "T_BEFORE"i | "T_AFTER"i | "T_MEETS"i | "T_METBY"i | "T_OVERLAPS"i | "T_OVERLAPPEDBY"i | "T_BEGINS"i | "T_BEGUNBY"i | "T_DURING"i | "T_CONTAINS"i | "T_ENDS"i | "T_ENDEDBY"i | "T_EQUALS"i | "T_INTERSECTS"i ?spatial_predicate: _binary_spatial_predicate_func "(" expression "," expression ")" -> binary_spatial_predicate | "RELATE" "(" expression "," expression "," SINGLE_QUOTED ")" -> relate_spatial_predicate | "BBOX" "(" expression "," full_number "," full_number "," full_number "," full_number [ "," SINGLE_QUOTED] ")" -> bbox_spatial_predicate !_binary_spatial_predicate_func: "S_INTERSECTS"i | "S_DISJOINT"i | "S_CONTAINS"i | "S_WITHIN"i | "S_TOUCHES"i | "S_CROSSES"i | "S_OVERLAPS"i | "S_EQUALS"i ?expression: sum ?sum: product | sum "+" product -> add | sum "-" 
product -> sub ?product: atom | product "*" atom -> mul | product "/" atom -> div ?atom: func | attribute | literal | "-" atom -> neg | "(" expression ")" func.2: attribute "(" expression ("," expression)* ")" -> function ?literal: timestamp | interval | number | BOOLEAN | SINGLE_QUOTED | ewkt_geometry -> geometry | envelope ?full_number: number | "-" number -> neg ?number: FLOAT | INT envelope: "ENVELOPE"i "(" number number number number ")" BOOLEAN.2: ( "TRUE"i | "FALSE"i) DOUBLE_QUOTED: "\"" /.*?/ "\"" SINGLE_QUOTED: "'" /.*?/ "'" DATETIME: /[0-9]{4}-?[0-1][0-9]-?[0-3][0-9][T ][0-2][0-9]:?[0-5][0-9]:?[0-5][0-9](\.[0-9]+)?(Z|[+-][0-9]{2}:[0-9]{2})?/ ?timestamp: "TIMESTAMP" "(" "'" DATETIME "'" ")" ?interval: "INTERVAL" "(" "'" DATETIME "'" "," "'" DATETIME "'" ")" attribute: /[a-zA-Z][a-zA-Z_:0-9.]+/ | DOUBLE_QUOTED // NAME: /[a-z_]+/ %import .wkt.ewkt_geometry // %import common.CNAME -> NAME %import common.INT %import common.FLOAT %import common.WS_INLINE %ignore WS_INLINE pygeofilter-0.3.1/pygeofilter/parsers/cql2_text/parser.py000066400000000000000000000142411473475122500236430ustar00rootroot00000000000000# ------------------------------------------------------------------------------ # # Project: pygeofilter # Authors: Fabian Schindler , # David Bitner # # ------------------------------------------------------------------------------ # Copyright (C) 2021 EOX IT Services GmbH # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies of this Software or works derived from this Software. 
# # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. import logging import os.path from lark import Lark, logger, v_args from ... import ast, values from ...cql2 import SPATIAL_PREDICATES_MAP, TEMPORAL_PREDICATES_MAP from ..iso8601 import ISO8601Transformer from ..wkt import WKTTransformer logger.setLevel(logging.DEBUG) @v_args(meta=False, inline=True) class CQLTransformer(WKTTransformer, ISO8601Transformer): def and_(self, *args): return ast.And.from_items(*args) def or_(self, *args): return ast.Or.from_items(*args) def not_(self, node): return ast.Not(node) def eq(self, lhs, rhs): return ast.Equal(lhs, rhs) def ne(self, lhs, rhs): return ast.NotEqual(lhs, rhs) def lt(self, lhs, rhs): return ast.LessThan(lhs, rhs) def lte(self, lhs, rhs): return ast.LessEqual(lhs, rhs) def gt(self, lhs, rhs): return ast.GreaterThan(lhs, rhs) def gte(self, lhs, rhs): return ast.GreaterEqual(lhs, rhs) def between(self, lhs, low, high): return ast.Between(lhs, low, high, False) def not_between(self, lhs, low, high): return ast.Between(lhs, low, high, True) def like(self, node, pattern): return ast.Like(node, pattern, False, "%", ".", "\\", False) def not_like(self, node, pattern): return ast.Like(node, pattern, False, "%", ".", "\\", True) def ilike(self, node, pattern): return ast.Like(node, pattern, True, "%", ".", "\\", False) def not_ilike(self, node, pattern): return ast.Like(node, pattern, True, "%", ".", "\\", True) def in_(self, node, *options): return ast.In(node, list(options), False) def not_in(self, node, *options): return ast.In(node, list(options), True) 
def null(self, node): return ast.IsNull(node, False) def not_null(self, node): return ast.IsNull(node, True) def exists(self, attribute): return ast.Exists(attribute, False) def does_not_exist(self, attribute): return ast.Exists(attribute, True) def include(self): return ast.Include(False) def exclude(self): return ast.Include(True) def before(self, node, dt): return ast.TimeBefore(node, dt) def before_or_during(self, node, period): return ast.TimeBeforeOrDuring(node, period) def during(self, node, period): return ast.TimeDuring(node, period) def during_or_after(self, node, period): return ast.TimeDuringOrAfter(node, period) def after(self, node, dt): return ast.TimeAfter(node, dt) def binary_spatial_predicate(self, op, lhs, rhs): op = op.lower() return SPATIAL_PREDICATES_MAP[op](lhs, rhs) def binary_temporal_predicate(self, lhs, op, rhs): op = op.lower() return TEMPORAL_PREDICATES_MAP[op](lhs, rhs) def relate_spatial_predicate(self, lhs, rhs, pattern): return ast.Relate(lhs, rhs, pattern) def distance_spatial_predicate(self, op, lhs, rhs, distance, units): cls = ast.DistanceWithin if op == "DWITHIN" else ast.DistanceBeyond return cls(lhs, rhs, distance, units) def distance_units(self, value): return value def bbox_spatial_predicate(self, lhs, minx, miny, maxx, maxy, crs=None): return ast.BBox(lhs, minx, miny, maxx, maxy, crs) def function(self, func_name, *expressions): name = func_name.name.lower() if name == "casei": name = "lower" return ast.Function(name, list(expressions)) def add(self, lhs, rhs): return ast.Add(lhs, rhs) def sub(self, lhs, rhs): return ast.Sub(lhs, rhs) def mul(self, lhs, rhs): return ast.Mul(lhs, rhs) def div(self, lhs, rhs): return ast.Div(lhs, rhs) def neg(self, value): return -value def attribute(self, name): return ast.Attribute(str(name)) def period(self, start, end): return [start, end] def INT(self, value): return int(value) def FLOAT(self, value): return float(value) def BOOLEAN(self, value): return value.lower() == "true" def 
DOUBLE_QUOTED(self, token): return token[1:-1] def SINGLE_QUOTED(self, token): return token[1:-1] def geometry(self, value): return values.Geometry(value) def envelope(self, x1, x2, y1, y2): return values.Envelope(x1, x2, y1, y2) def interval(self, start, end): return values.Interval(start, end) parser = Lark.open( "grammar.lark", rel_to=__file__, parser="lalr", debug=True, maybe_placeholders=False, transformer=CQLTransformer(), import_paths=[os.path.dirname(os.path.dirname(__file__))], ) def parse(cql_text): return parser.parse(cql_text) if __name__ == "__main__": print(parse("'abc' < 'bce'")) pygeofilter-0.3.1/pygeofilter/parsers/cql_json/000077500000000000000000000000001473475122500216765ustar00rootroot00000000000000pygeofilter-0.3.1/pygeofilter/parsers/cql_json/__init__.py000066400000000000000000000000571473475122500240110ustar00rootroot00000000000000from .parser import parse __all__ = ["parse"] pygeofilter-0.3.1/pygeofilter/parsers/cql_json/parser.py000066400000000000000000000163141473475122500235510ustar00rootroot00000000000000# ------------------------------------------------------------------------------ # # Project: pygeofilter # Authors: Fabian Schindler # # ------------------------------------------------------------------------------ # Copyright (C) 2021 EOX IT Services GmbH # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies of this Software or works derived from this Software. 
# # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. # ------------------------------------------------------------------------------ import json from datetime import datetime from typing import List, Union, cast from ... import ast, values from ...util import parse_datetime, parse_duration from ...values import Envelope, Geometry # https://portal.ogc.org/files/96288 COMPARISON_MAP = { "eq": ast.Equal, "lt": ast.LessThan, "lte": ast.LessEqual, "gt": ast.GreaterThan, "gte": ast.GreaterEqual, } SPATIAL_PREDICATES_MAP = { "intersects": ast.GeometryIntersects, "equals": ast.GeometryEquals, "disjoint": ast.GeometryDisjoint, "touches": ast.GeometryTouches, "within": ast.GeometryWithin, "overlaps": ast.GeometryOverlaps, "crosses": ast.GeometryCrosses, "contains": ast.GeometryContains, } TEMPORAL_PREDICATES_MAP = { "before": ast.TimeBefore, "after": ast.TimeAfter, "meets": ast.TimeMeets, "metby": ast.TimeMetBy, "toverlaps": ast.TimeOverlaps, "overlappedby": ast.TimeOverlappedBy, "begins": ast.TimeBegins, "begunby": ast.TimeBegunBy, "during": ast.TimeDuring, "tcontains": ast.TimeContains, "ends": ast.TimeEnds, "endedby": ast.TimeEndedBy, "tequals": ast.TimeEquals, # 'anyinteract': ast.TimeAnyInteract, # TODO? 
} ARRAY_PREDICATES_MAP = { "aequals": ast.ArrayEquals, "acontains": ast.ArrayContains, "acontainedBy": ast.ArrayContainedBy, "aoverlaps": ast.ArrayOverlaps, } ARITHMETIC_MAP = { "+": ast.Add, "-": ast.Sub, "*": ast.Mul, "/": ast.Div, } def walk_cql_json(node: dict, is_temporal: bool = False) -> ast.AstType: # noqa: C901 if is_temporal and isinstance(node, str): # Open interval if node == "..": return None try: return parse_duration(node) except ValueError: value = parse_datetime(node) if value is None: raise ValueError(f"Failed to parse temporal value from {node}") return value if isinstance(node, (str, float, int, bool)): return node if isinstance(node, list): result = [ cast(datetime, walk_cql_json(sub_node, is_temporal)) for sub_node in node ] if is_temporal: return values.Interval(*result) else: return result assert isinstance(node, dict) # check if we are dealing with a geometry if "type" in node and "coordinates" in node: # TODO: test if node is actually valid return Geometry(node) elif "bbox" in node: return Envelope(*node["bbox"]) # decode all other nodes for name, value in node.items(): if name in ("and", "or"): sub_items = cast(list, walk_cql_json(value)) return (ast.And if name == "and" else ast.Or).from_items(*sub_items) elif name == "not": # allow both arrays and objects, the standard is ambigous in # that regard if isinstance(value, list): value = value[0] return ast.Not(cast(ast.Node, walk_cql_json(value))) elif name in COMPARISON_MAP: return COMPARISON_MAP[name]( cast(ast.ScalarAstType, walk_cql_json(value[0])), cast(ast.ScalarAstType, walk_cql_json(value[1])), ) elif name == "between": return ast.Between( cast(ast.Node, walk_cql_json(value["value"])), cast(ast.ScalarAstType, walk_cql_json(value["lower"])), cast(ast.ScalarAstType, walk_cql_json(value["upper"])), not_=False, ) elif name == "like": return ast.Like( cast(ast.Node, walk_cql_json(value["like"][0])), cast(str, value["like"][1]), nocase=value.get("nocase", True), 
wildcard=value.get("wildcard", "%"), singlechar=value.get("singleChar", "."), escapechar=value.get("escapeChar", "\\"), not_=False, ) elif name == "in": return ast.In( cast(ast.AstType, walk_cql_json(value["value"])), cast(List[ast.AstType], walk_cql_json(value["list"])), not_=False, # TODO nocase ) elif name == "isNull": return ast.IsNull( walk_cql_json(value), not_=False, ) elif name in SPATIAL_PREDICATES_MAP: return SPATIAL_PREDICATES_MAP[name]( cast(ast.SpatialAstType, walk_cql_json(value[0])), cast(ast.SpatialAstType, walk_cql_json(value[1])), ) elif name in TEMPORAL_PREDICATES_MAP: return TEMPORAL_PREDICATES_MAP[name]( cast(ast.TemporalAstType, walk_cql_json(value[0], is_temporal=True)), cast(ast.TemporalAstType, walk_cql_json(value[1], is_temporal=True)), ) elif name in ARRAY_PREDICATES_MAP: return ARRAY_PREDICATES_MAP[name]( cast(ast.ArrayAstType, walk_cql_json(value[0])), cast(ast.ArrayAstType, walk_cql_json(value[1])), ) elif name in ARITHMETIC_MAP: return ARITHMETIC_MAP[name]( cast(ast.ScalarAstType, walk_cql_json(value[0])), cast(ast.ScalarAstType, walk_cql_json(value[1])), ) elif name == "property": return ast.Attribute(value) elif name == "function": return ast.Function( value["name"], cast(List[ast.AstType], walk_cql_json(value["arguments"])), ) raise ValueError(f"Unable to parse expression node {node!r}") def parse(cql: Union[str, dict]) -> ast.AstType: if isinstance(cql, str): root = json.loads(cql) else: root = cql return walk_cql_json(root) pygeofilter-0.3.1/pygeofilter/parsers/ecql/000077500000000000000000000000001473475122500210125ustar00rootroot00000000000000pygeofilter-0.3.1/pygeofilter/parsers/ecql/__init__.py000066400000000000000000000000571473475122500231250ustar00rootroot00000000000000from .parser import parse __all__ = ["parse"] pygeofilter-0.3.1/pygeofilter/parsers/ecql/grammar.lark000066400000000000000000000144601473475122500233200ustar00rootroot00000000000000// 
------------------------------------------------------------------------------ // // Project: pygeofilter // Authors: Fabian Schindler // // ------------------------------------------------------------------------------ // Copyright (C) 2021 EOX IT Services GmbH // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal // in the Software without restriction, including without limitation the rights // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell // copies of the Software, and to permit persons to whom the Software is // furnished to do so, subject to the following conditions: // // The above copyright notice and this permission notice shall be included in // all copies of this Software or works derived from this Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN // THE SOFTWARE. 
// ------------------------------------------------------------------------------ ?start: condition ?condition: condition_1 | condition "AND" condition_1 -> and_ | condition "OR" condition_1 -> or_ ?condition_1: predicate | "NOT" predicate -> not_ | "(" condition ")" ?predicate: expression "=" expression -> eq | expression "<>" expression -> ne | expression "<" expression -> lt | expression "<=" expression -> lte | expression ">" expression -> gt | expression ">=" expression -> gte | expression "BETWEEN" expression "AND" expression -> between | expression "NOT" "BETWEEN" expression "AND" expression -> not_between | expression "LIKE" SINGLE_QUOTED -> like | expression "NOT" "LIKE" SINGLE_QUOTED -> not_like | expression "ILIKE" SINGLE_QUOTED -> ilike | expression "NOT" "ILIKE" SINGLE_QUOTED -> not_ilike | expression "IN" "(" expression ( "," expression )* ")" -> in_ | expression "NOT" "IN" "(" expression ( "," expression )* ")" -> not_in | expression "IS" "NULL" -> null | expression "IS" "NOT" "NULL" -> not_null | attribute "EXISTS" -> exists | attribute "DOES-NOT-EXIST" -> does_not_exist | "INCLUDE" -> include | "EXCLUDE" -> exclude | temporal_predicate | spatial_predicate ?temporal_predicate: expression "BEFORE" DATETIME -> before | expression "BEFORE" "OR" "DURING" period -> before_or_during | expression "DURING" period -> during | expression "DURING" "OR" "AFTER" period -> during_or_after | expression "AFTER" DATETIME -> after ?spatial_predicate: _binary_spatial_predicate_func "(" expression "," expression ")" -> binary_spatial_predicate | "RELATE" "(" expression "," expression "," SINGLE_QUOTED ")" -> relate_spatial_predicate | _distance_spatial_predicate_func "(" expression "," expression "," number "," distance_units ")" -> distance_spatial_predicate | "BBOX" "(" expression "," full_number "," full_number "," full_number "," full_number [ "," SINGLE_QUOTED] ")" -> bbox_spatial_predicate !_binary_spatial_predicate_func: "INTERSECTS" | "DISJOINT" | "CONTAINS" | 
"WITHIN" | "TOUCHES" | "CROSSES" | "OVERLAPS" | "EQUALS" !_distance_spatial_predicate_func: "DWITHIN" | "BEYOND" !distance_units: "feet" | "meters" | "statute miles" | "nautical miles" | "kilometers" -> distance_units ?expression: sum ?sum: product | sum "+" product -> add | sum "-" product -> sub ?product: atom | product "*" atom -> mul | product "/" atom -> div ?atom: attribute | literal | "-" atom -> neg | NAME "(" [ expression ("," expression)* ] ")" -> function | "(" expression ")" attribute: NAME | DOUBLE_QUOTED | QUALIFIED_NAME ?literal: number | BOOLEAN | SINGLE_QUOTED | ewkt_geometry -> geometry | envelope ?full_number: number | "-" number -> neg ?number: FLOAT | INT period: DATETIME "/" DATETIME | DURATION "/" DATETIME | DATETIME "/" DURATION envelope: "ENVELOPE" "(" number number number number ")" BOOLEAN.2: ( "TRUE"i | "FALSE"i ) DOUBLE_QUOTED: "\"" /.*?/ "\"" SINGLE_QUOTED: "'" /.*?/ "'" QUALIFIED_NAME: (NAME ("." | ":"))+ NAME %import .wkt.ewkt_geometry %import .iso8601.DATETIME %import .iso8601.DURATION %import common.CNAME -> NAME %import common.INT %import common.FLOAT %import common.WS_INLINE %ignore WS_INLINE pygeofilter-0.3.1/pygeofilter/parsers/ecql/parser.py000066400000000000000000000140051473475122500226600ustar00rootroot00000000000000# ------------------------------------------------------------------------------ # # Project: pygeofilter # Authors: Fabian Schindler # # ------------------------------------------------------------------------------ # Copyright (C) 2021 EOX IT Services GmbH # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above 
copyright notice and this permission notice shall be included in # all copies of this Software or works derived from this Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. import logging import os.path from lark import Lark, logger, v_args from ... import ast, values from ..iso8601 import ISO8601Transformer from ..wkt import WKTTransformer logger.setLevel(logging.DEBUG) SPATIAL_PREDICATES_MAP = { "INTERSECTS": ast.GeometryIntersects, "DISJOINT": ast.GeometryDisjoint, "CONTAINS": ast.GeometryContains, "WITHIN": ast.GeometryWithin, "TOUCHES": ast.GeometryTouches, "CROSSES": ast.GeometryCrosses, "OVERLAPS": ast.GeometryOverlaps, "EQUALS": ast.GeometryEquals, } @v_args(meta=False, inline=True) class ECQLTransformer(WKTTransformer, ISO8601Transformer): def and_(self, lhs, rhs): return ast.And(lhs, rhs) def or_(self, lhs, rhs): return ast.Or(lhs, rhs) def not_(self, node): return ast.Not(node) def eq(self, lhs, rhs): return ast.Equal(lhs, rhs) def ne(self, lhs, rhs): return ast.NotEqual(lhs, rhs) def lt(self, lhs, rhs): return ast.LessThan(lhs, rhs) def lte(self, lhs, rhs): return ast.LessEqual(lhs, rhs) def gt(self, lhs, rhs): return ast.GreaterThan(lhs, rhs) def gte(self, lhs, rhs): return ast.GreaterEqual(lhs, rhs) def between(self, lhs, low, high): return ast.Between(lhs, low, high, False) def not_between(self, lhs, low, high): return ast.Between(lhs, low, high, True) def like(self, node, pattern): return ast.Like(node, pattern, False, "%", ".", "\\", False) def not_like(self, node, pattern): return ast.Like(node, pattern, False, "%", 
".", "\\", True) def ilike(self, node, pattern): return ast.Like(node, pattern, True, "%", ".", "\\", False) def not_ilike(self, node, pattern): return ast.Like(node, pattern, True, "%", ".", "\\", True) def in_(self, node, *options): return ast.In(node, list(options), False) def not_in(self, node, *options): return ast.In(node, list(options), True) def null(self, node): return ast.IsNull(node, False) def not_null(self, node): return ast.IsNull(node, True) def exists(self, attribute): return ast.Exists(attribute, False) def does_not_exist(self, attribute): return ast.Exists(attribute, True) def include(self): return ast.Include(False) def exclude(self): return ast.Include(True) def before(self, node, dt): return ast.TimeBefore(node, dt) def before_or_during(self, node, period): return ast.TimeBeforeOrDuring(node, period) def during(self, node, period): return ast.TimeDuring(node, period) def during_or_after(self, node, period): return ast.TimeDuringOrAfter(node, period) def after(self, node, dt): return ast.TimeAfter(node, dt) def binary_spatial_predicate(self, op, lhs, rhs): return SPATIAL_PREDICATES_MAP[op](lhs, rhs) def relate_spatial_predicate(self, lhs, rhs, pattern): return ast.Relate(lhs, rhs, pattern) def distance_spatial_predicate(self, op, lhs, rhs, distance, units): cls = ast.DistanceWithin if op == "DWITHIN" else ast.DistanceBeyond return cls(lhs, rhs, distance, units) def distance_units(self, value): return value def bbox_spatial_predicate(self, lhs, minx, miny, maxx, maxy, crs=None): return ast.BBox(lhs, minx, miny, maxx, maxy, crs) def function(self, func_name, *expressions): return ast.Function(str(func_name), list(expressions)) def add(self, lhs, rhs): return ast.Add(lhs, rhs) def sub(self, lhs, rhs): return ast.Sub(lhs, rhs) def mul(self, lhs, rhs): return ast.Mul(lhs, rhs) def div(self, lhs, rhs): return ast.Div(lhs, rhs) def neg(self, value): return -value def attribute(self, name): return ast.Attribute(str(name)) def period(self, start, end): 
return values.Interval(start, end) def INT(self, value): return int(value) def FLOAT(self, value): return float(value) def BOOLEAN(self, value): return value.lower() == "true" def DOUBLE_QUOTED(self, token): return token[1:-1] def SINGLE_QUOTED(self, token): return token[1:-1] def geometry(self, value): return values.Geometry(value) def envelope(self, x1, x2, y1, y2): return values.Envelope(x1, x2, y1, y2) parser = Lark.open( "grammar.lark", rel_to=__file__, parser="lalr", debug=True, maybe_placeholders=False, transformer=ECQLTransformer(), import_paths=[os.path.dirname(os.path.dirname(__file__))], ) def parse(cql_text): return parser.parse(cql_text) pygeofilter-0.3.1/pygeofilter/parsers/fes/000077500000000000000000000000001473475122500206435ustar00rootroot00000000000000pygeofilter-0.3.1/pygeofilter/parsers/fes/__init__.py000066400000000000000000000000571473475122500227560ustar00rootroot00000000000000from .parser import parse __all__ = ["parse"] pygeofilter-0.3.1/pygeofilter/parsers/fes/base.py000066400000000000000000000144241473475122500221340ustar00rootroot00000000000000import base64 import datetime from pygml.georss import NAMESPACE as NAMESPACE_GEORSS from pygml.georss import parse_georss from pygml.pre_v32 import NAMESPACE as NAMESPACE_PRE_32 from pygml.pre_v32 import NSMAP as NSMAP_PRE_32 from pygml.pre_v32 import parse_pre_v32 from pygml.v32 import NAMESPACE as NAMESPACE_32 from pygml.v32 import NSMAP as NSMAP_32 from pygml.v32 import parse_v32 from pygml.v33 import NAMESPACE as NAMESPACE_33_CE from pygml.v33 import parse_v33_ce from ... 
import ast, values from ...util import parse_datetime, parse_duration from .gml import is_temporal, parse_temporal from .util import Element, XMLParser, handle, handle_namespace class FESBaseParser(XMLParser): @handle("Filter") def filter_(self, node: Element, predicate): return predicate @handle("And") def and_(self, node: Element, lhs, rhs): return ast.And(lhs, rhs) @handle("Or") def or_(self, node: Element, lhs, rhs): return ast.Or(lhs, rhs) @handle("Not") def not_(self, node: Element, lhs): return ast.Not(lhs) @handle("PropertyIsEqualTo") def property_is_equal_to(self, node: Element, lhs, rhs): return ast.Equal(lhs, rhs) @handle("PropertyIsNotEqualTo") def property_is_not_equal_to(self, node: Element, lhs, rhs): return ast.NotEqual(lhs, rhs) @handle("PropertyIsLessThan") def property_is_less_than(self, node: Element, lhs, rhs): return ast.LessThan(lhs, rhs) @handle("PropertyIsGreaterThan") def property_is_greater_than(self, node: Element, lhs, rhs): return ast.GreaterThan(lhs, rhs) @handle("PropertyIsLessThanOrEqualTo") def property_is_less_than_or_equal_to(self, node: Element, lhs, rhs): return ast.LessEqual(lhs, rhs) @handle("PropertyIsGreaterThanOrEqualTo") def property_is_greater_than_or_equal_to(self, node: Element, lhs, rhs): return ast.GreaterEqual(lhs, rhs) @handle("PropertyIsLike") def property_is_like(self, node: Element, lhs, rhs): return ast.Like( lhs, rhs, wildcard=node.attrib["wildCard"], singlechar=node.attrib["singleChar"], escapechar=node.attrib.get("escape", node.attrib["escapeChar"]), nocase=node.attrib.get("matchCase", "true") == "false", not_=False, ) @handle("PropertyIsNull") def property_is_null(self, node: Element, lhs): return ast.IsNull(lhs, not_=False) @handle("PropertyIsBetween") def property_is_between(self, node: Element, lhs, low, high): return ast.Between(lhs, low, high, False) @handle("LowerBoundary", "UpperBoundary") def boundary(self, node: Element, expression): return expression @handle("Equals") def geometry_equals(self, 
node: Element, lhs, rhs): return ast.GeometryEquals(lhs, rhs) @handle("Disjoint") def geometry_disjoint(self, node: Element, lhs, rhs): return ast.GeometryDisjoint(lhs, rhs) @handle("Touches") def geometry_touches(self, node: Element, lhs, rhs): return ast.GeometryTouches(lhs, rhs) @handle("Within") def geometry_within(self, node: Element, lhs, rhs): return ast.GeometryWithin(lhs, rhs) @handle("Overlaps") def geometry_overlaps(self, node: Element, lhs, rhs): return ast.GeometryOverlaps(lhs, rhs) @handle("Crosses") def geometry_crosses(self, node: Element, lhs, rhs): return ast.GeometryCrosses(lhs, rhs) @handle("Intersects") def geometry_intersects(self, node: Element, lhs, rhs): return ast.GeometryIntersects(lhs, rhs) @handle("Contains") def geometry_contains(self, node: Element, lhs, rhs): return ast.GeometryContains(lhs, rhs) @handle("DWithin") def distance_within(self, node: Element, lhs, rhs, distance_and_units): distance, units = distance_and_units return ast.DistanceWithin(lhs, rhs, distance, units) @handle("Beyond") def distance_beyond(self, node: Element, lhs, rhs, distance_and_units): distance, units = distance_and_units return ast.DistanceBeyond(lhs, rhs, distance, units) @handle("Distance") def distance(self, node: Element): return (float(node.text), node.attrib["uom"]) # @handle('BBOX') # def geometry_bbox(self, node: Element, lhs, rhs): # # TODO: ast.BBox() seems incompatible # pass @handle("ValueReference") def value_reference(self, node): return ast.Attribute(node.text) @handle("Literal") def literal(self, node): type_ = node.get("type", "").rpartition(":")[2] value = node.text if type_ == "boolean": return value.lower() == "true" elif type_ in ( "byte", "int", "integer", "long", "negativeInteger", "nonNegativeInteger", "nonPositiveInteger", "positiveInteger", "short", "unsignedByte", "unsignedInt", "unsignedLong", "unsignedShort", ): return int(value) elif type_ in ("decimal", "double", "float"): return float(value) elif type_ == "base64Binary": 
return base64.b64decode(value) elif type_ == "hexBinary": return bytes.fromhex(value) elif type_ == "date": return datetime.date.fromisoformat(value) elif type_ == "dateTime": return parse_datetime(value) elif type_ == "duration": return parse_duration(value) # return to string return value @handle_namespace(NAMESPACE_PRE_32, False) def gml_pre_32(self, node: Element): if is_temporal(node): return parse_temporal(node, NSMAP_PRE_32) return values.Geometry(parse_pre_v32(node)) @handle_namespace(NAMESPACE_32, False) def gml_32(self, node: Element): if is_temporal(node): return parse_temporal(node, NSMAP_32) return values.Geometry(parse_v32(node)) @handle_namespace(NAMESPACE_33_CE, False) def gml_33_ce(self, node: Element): return values.Geometry(parse_v33_ce(node)) @handle_namespace(NAMESPACE_GEORSS, False) def georss(self, node: Element): return values.Geometry(parse_georss(node)) pygeofilter-0.3.1/pygeofilter/parsers/fes/gml.py000066400000000000000000000032451473475122500220000ustar00rootroot00000000000000from datetime import date, datetime, timedelta from typing import Dict, Union from lxml import etree from ... 
import values from ...util import parse_datetime, parse_duration from .util import Element Temporal = Union[date, datetime, timedelta, values.Interval] def _parse_time_position(node: Element, nsmap: Dict[str, str]) -> datetime: return parse_datetime(node.text) def _parse_time_instant(node: Element, nsmap: Dict[str, str]) -> datetime: position = node.xpath("gml:timePosition", namespaces=nsmap)[0] return _parse_time_position(position, nsmap) def _parse_time_period(node: Element, nsmap: Dict[str, str]) -> values.Interval: begin = node.xpath( "gml:begin/gml:TimeInstant/gml:timePosition|gml:beginPosition", namespaces=nsmap )[0] end = node.xpath( "gml:end/gml:TimeInstant/gml:timePosition|gml:endPosition", namespaces=nsmap )[0] return values.Interval( _parse_time_position(begin, nsmap), _parse_time_position(end, nsmap), ) def _parse_valid_time(node: Element, nsmap: Dict[str, str]) -> Temporal: return parse_temporal(node[0], nsmap) def _parse_duration(node: Element, nsmap: Dict[str, str]) -> timedelta: return parse_duration(node.text) PARSER_MAP = { "validTime": _parse_valid_time, "timePosition": _parse_time_position, "TimeInstant": _parse_time_instant, "TimePeriod": _parse_time_period, "duration": _parse_duration, } def is_temporal(node: Element) -> bool: return etree.QName(node).localname in PARSER_MAP def parse_temporal(node: Element, nsmap: Dict[str, str]) -> Temporal: parser = PARSER_MAP[etree.QName(node).localname] return parser(node, nsmap) pygeofilter-0.3.1/pygeofilter/parsers/fes/parser.py000066400000000000000000000012111473475122500225040ustar00rootroot00000000000000from typing import Union from lxml import etree from ... 
import ast from .util import Element, ElementTree from .v11 import FES11Parser from .v20 import FES20Parser def parse(xml: Union[str, Element, ElementTree]) -> ast.Node: if isinstance(xml, str): root = etree.fromstring(xml) else: root = xml # decide upon namespace which parser to use namespace = etree.QName(root).namespace if namespace == FES11Parser.namespace: return FES11Parser().parse(root) elif namespace == FES20Parser.namespace: return FES20Parser().parse(root) raise ValueError(f"Unsupported namespace {namespace}") pygeofilter-0.3.1/pygeofilter/parsers/fes/util.py000066400000000000000000000070671473475122500222040ustar00rootroot00000000000000from functools import wraps from typing import Callable, Optional, Type, Union from lxml import etree from ... import ast Element = etree._Element ElementTree = etree._ElementTree ParseInput = Union[etree._Element, etree._ElementTree, str] class NodeParsingError(ValueError): pass class Missing: pass def handle( *tags: str, namespace: Union[str, Type[Missing]] = Missing, subiter: bool = True ) -> Callable: """Function-decorator to mark a class function as a handler for a given node type. """ assert tags @wraps(handle) def inner(func): func.handles_tags = tags func.namespace = namespace func.subiter = subiter return func return inner def handle_namespace(namespace: str, subiter: bool = True) -> Callable: """Function-decorator to mark a class function as a handler for a given namespace. 
""" @wraps(handle) def inner(func): func.handles_namespace = namespace func.subiter = subiter return func return inner class XMLParserMeta(type): def __init__(cls, name, bases, dct): cls_values = [(cls, dct.values())] cls_namespace = getattr(cls, "namespace", None) for base in bases: cls_namespace = cls_namespace or getattr(base, "namespace", None) cls_values.append((base, base.__dict__.values())) tag_map = {} namespace_map = {} for cls_, values in cls_values: for value in values: if hasattr(value, "handles_tags"): for handled_tag in value.handles_tags: namespace = value.namespace if namespace is Missing: namespace = ( getattr(cls_, "namespace", None) or cls_namespace ) if namespace: if isinstance(namespace, (list, tuple)): namespaces = namespace else: namespaces = [namespace] for namespace in namespaces: full_tag = f"{{{namespace}}}{handled_tag}" tag_map[full_tag] = value else: tag_map[handled_tag] = value if hasattr(value, "handles_namespace"): namespace_map[value.handles_namespace] = value cls.tag_map = tag_map cls.namespace_map = namespace_map class XMLParser(metaclass=XMLParserMeta): namespace: Optional[str] = None tag_map: dict namespace_map: dict def parse(self, input_: ParseInput) -> ast.Node: if isinstance(input_, Element): root = input_ elif isinstance(input_, ElementTree): root = input_.getroot() else: root = etree.fromstring(input_) return self._evaluate_node(root) def _evaluate_node(self, node: etree._Element) -> ast.Node: qname = etree.QName(node.tag) if node.tag in self.tag_map: parse_func = self.tag_map[node.tag] elif qname.namespace in self.namespace_map: parse_func = self.namespace_map[qname.namespace] else: raise NodeParsingError(f"Cannot parse XML tag {node.tag}") if parse_func.subiter: sub_nodes = [self._evaluate_node(child) for child in node.iterchildren()] return parse_func(self, node, *sub_nodes) else: return parse_func(self, node) 
pygeofilter-0.3.1/pygeofilter/parsers/fes/v11.py000066400000000000000000000016131473475122500216250ustar00rootroot00000000000000from ... import ast from .base import FESBaseParser from .util import Element, ParseInput, handle class FES11Parser(FESBaseParser): namespace = "http://www.opengis.net/ogc" @handle("Add") def add( self, node: Element, lhs: ast.ScalarAstType, rhs: ast.ScalarAstType ) -> ast.Node: return ast.Add(lhs, rhs) @handle("Sub") def sub( self, node: Element, lhs: ast.ScalarAstType, rhs: ast.ScalarAstType ) -> ast.Node: return ast.Sub(lhs, rhs) @handle("Mul") def mul( self, node: Element, lhs: ast.ScalarAstType, rhs: ast.ScalarAstType ) -> ast.Node: return ast.Mul(lhs, rhs) @handle("Div") def div( self, node: Element, lhs: ast.ScalarAstType, rhs: ast.ScalarAstType ) -> ast.Node: return ast.Div(lhs, rhs) def parse(input_: ParseInput) -> ast.Node: return FES11Parser().parse(input_) pygeofilter-0.3.1/pygeofilter/parsers/fes/v20.py000066400000000000000000000062641473475122500216340ustar00rootroot00000000000000import base64 import datetime from ... import ast from ...util import parse_datetime, parse_duration from .base import FESBaseParser from .util import Element, ParseInput, handle class FES20Parser(FESBaseParser): namespace = "http://www.opengis.net/fes/2.0" # @handle('PropertyIsNil') # def property_is_nil(self, node: Element, lhs, rhs): # return ast... 
@handle("After") def time_after(self, node: Element, lhs, rhs): return ast.TimeAfter(lhs, rhs) @handle("Before") def time_before(self, node: Element, lhs, rhs): return ast.TimeBefore(lhs, rhs) @handle("Begins") def time_begins(self, node: Element, lhs, rhs): return ast.TimeBegins(lhs, rhs) @handle("BegunBy") def time_begun_by(self, node: Element, lhs, rhs): return ast.TimeBegunBy(lhs, rhs) @handle("TContains") def time_contains(self, node: Element, lhs, rhs): return ast.TimeContains(lhs, rhs) @handle("During") def time_during(self, node: Element, lhs, rhs): return ast.TimeDuring(lhs, rhs) @handle("TEquals") def time_equals(self, node: Element, lhs, rhs): return ast.TimeEquals(lhs, rhs) @handle("TOverlaps") def time_overlaps(self, node: Element, lhs, rhs): return ast.TimeOverlaps(lhs, rhs) @handle("Meets") def time_meets(self, node: Element, lhs, rhs): return ast.TimeMeets(lhs, rhs) @handle("OverlappedBy") def time_overlapped_by(self, node: Element, lhs, rhs): return ast.TimeOverlappedBy(lhs, rhs) @handle("MetBy") def time_met_by(self, node: Element, lhs, rhs): return ast.TimeMetBy(lhs, rhs) @handle("Ends") def time_ends(self, node: Element, lhs, rhs): return ast.TimeEnds(lhs, rhs) @handle("EndedBy") def time_ended_by(self, node: Element, lhs, rhs): return ast.TimeEndedBy(lhs, rhs) @handle("ValueReference") def value_reference(self, node: Element): return ast.Attribute(node.text) @handle("Literal") def literal(self, node: Element): type_ = node.get("type").rpartition(":")[2] value = node.text if type_ == "boolean": return value.lower() == "true" elif type_ in ( "byte", "int", "integer", "long", "negativeInteger", "nonNegativeInteger", "nonPositiveInteger", "positiveInteger", "short", "unsignedByte", "unsignedInt", "unsignedLong", "unsignedShort", ): return int(value) elif type_ in ("decimal", "double", "float"): return float(value) elif type_ == "base64Binary": return base64.b64decode(value) elif type_ == "hexBinary": return bytes.fromhex(value) elif type_ == 
"date": return datetime.date.fromisoformat(value) elif type_ == "dateTime": return parse_datetime(value) elif type_ == "duration": return parse_duration(value) # return to string return value def parse(input_: ParseInput) -> ast.Node: return FES20Parser().parse(input_) pygeofilter-0.3.1/pygeofilter/parsers/iso8601.lark000066400000000000000000000032041473475122500220510ustar00rootroot00000000000000// ------------------------------------------------------------------------------ // // Project: pygeofilter // Authors: Fabian Schindler // // ------------------------------------------------------------------------------ // Copyright (C) 2021 EOX IT Services GmbH // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal // in the Software without restriction, including without limitation the rights // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell // copies of the Software, and to permit persons to whom the Software is // furnished to do so, subject to the following conditions: // // The above copyright notice and this permission notice shall be included in // all copies of this Software or works derived from this Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN // THE SOFTWARE. 
// ------------------------------------------------------------------------------ DATETIME: /\d{4}-\d{2}-\d{2}T[0-2][0-9]:[0-5][0-9]:[0-5][0-9](\.[0-9]+)?(Z|[+-][0-9]{2}:[0-9]{2})/ DURATION: /P((\d+Y)?(\d+M)?(\d+D)?)?(T(\d+H)?(\d+M)?(\d+S)?)?/ pygeofilter-0.3.1/pygeofilter/parsers/iso8601.py000066400000000000000000000033631473475122500215560ustar00rootroot00000000000000# ------------------------------------------------------------------------------ # # Project: pygeofilter # Authors: Fabian Schindler # # ------------------------------------------------------------------------------ # Copyright (C) 2021 EOX IT Services GmbH # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies of this Software or works derived from this Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. 
# ------------------------------------------------------------------------------ from lark import Transformer, v_args from ..util import parse_datetime, parse_duration @v_args(meta=False, inline=True) class ISO8601Transformer(Transformer): def DATETIME(self, dt): return parse_datetime(dt) def DURATION(self, duration): return parse_duration(duration) pygeofilter-0.3.1/pygeofilter/parsers/jfe/000077500000000000000000000000001473475122500206325ustar00rootroot00000000000000pygeofilter-0.3.1/pygeofilter/parsers/jfe/__init__.py000066400000000000000000000000571473475122500227450ustar00rootroot00000000000000from .parser import parse __all__ = ["parse"] pygeofilter-0.3.1/pygeofilter/parsers/jfe/parser.py000066400000000000000000000134521473475122500225050ustar00rootroot00000000000000# ------------------------------------------------------------------------------ # # Project: pygeofilter # Authors: Fabian Schindler # # ------------------------------------------------------------------------------ # Copyright (C) 2021 EOX IT Services GmbH # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies of this Software or works derived from this Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. # ------------------------------------------------------------------------------ """ Parser implementation for JFE. Spec here: https://github.com/tschaub/ogcapi-features/tree/json-array-expression/extensions/cql/jfe """ import json from datetime import datetime from typing import Any, Dict, List, Type, Union, cast from ... import ast, values from ...util import parse_datetime COMPARISON_MAP: Dict[str, Type] = { "==": ast.Equal, "!=": ast.NotEqual, "<": ast.LessThan, "<=": ast.LessEqual, ">": ast.GreaterThan, ">=": ast.GreaterEqual, } SPATIAL_PREDICATES_MAP = { "intersects": ast.GeometryIntersects, "within": ast.GeometryWithin, } TEMPORAL_PREDICATES_MAP = { "before": ast.TimeBefore, "after": ast.TimeAfter, "during": ast.TimeDuring, } ARITHMETIC_MAP = { "+": ast.Add, "-": ast.Sub, "*": ast.Mul, "/": ast.Div, } FUNCTION_MAP = { "%": "mod", "^": "pow", } ParseResult = Union[ ast.Node, str, float, int, datetime, values.Geometry, values.Interval, Dict[Any, Any], # TODO: for like wildcards. 
] def _parse_node(node: Union[list, dict]) -> ParseResult: # noqa: C901 if isinstance(node, (str, float, int)): return node elif isinstance(node, dict): # wrap geometry, we say that the 'type' property defines if it is a # geometry if "type" in node: return values.Geometry(node) # just return objects for example `like` wildcards else: return node if not isinstance(node, list): raise ValueError(f"Invalid node class {type(node)}") op = node[0] arguments = [_parse_node(sub) for sub in node[1:]] if op in ["all", "any"]: cls = ast.And if op == "all" else ast.Or return cls.from_items(*arguments) elif op == "!": return ast.Not(*cast(List[ast.Node], arguments)) elif op in COMPARISON_MAP: return COMPARISON_MAP[op](*arguments) elif op == "like": wildcard = "%" if len(arguments) > 2: wildcard = cast(dict, arguments[2]).get("wildCard", "%") return ast.Like( cast(ast.Node, arguments[0]), cast(str, arguments[1]), nocase=False, wildcard=wildcard, singlechar=".", escapechar="\\", not_=False, ) elif op == "in": assert isinstance(arguments[0], ast.Node) return ast.In( cast(ast.Node, arguments[0]), cast(List[ast.AstType], arguments[1:]), not_=False, ) elif op in SPATIAL_PREDICATES_MAP: return SPATIAL_PREDICATES_MAP[op](*cast(List[ast.SpatialAstType], arguments)) elif op in TEMPORAL_PREDICATES_MAP: # parse strings to datetimes dt_args = [ parse_datetime(arg) if isinstance(arg, str) else arg for arg in arguments ] if len(arguments) == 3: if isinstance(dt_args[0], datetime) and isinstance(dt_args[1], datetime): dt_args = [ values.Interval(dt_args[0], dt_args[1]), dt_args[2], ] if isinstance(dt_args[1], datetime) and isinstance(dt_args[2], datetime): dt_args = [ dt_args[0], values.Interval(dt_args[1], dt_args[2]), ] return TEMPORAL_PREDICATES_MAP[op](*cast(List[ast.TemporalAstType], dt_args)) # special property getters elif op in ["id", "geometry"]: return ast.Attribute(op) # normal property getter elif op == "get": return ast.Attribute(arguments[0]) elif op == "bbox": pass # TODO elif 
op in ARITHMETIC_MAP: return ARITHMETIC_MAP[op](*cast(List[ast.ScalarAstType], arguments)) elif op in ["%", "floor", "ceil", "abs", "^", "min", "max"]: return ast.Function( FUNCTION_MAP.get(op, op), cast(List[ast.AstType], arguments) ) raise ValueError(f"Invalid expression operation '{op}'") def parse(jfe: Union[str, list, dict]) -> ast.Node: """Parses the given JFE expression (either a string or an already parsed JSON) to an AST. If a string is passed, it will be parsed as JSON. https://github.com/tschaub/ogcapi-features/tree/json-array-expression/extensions/cql/jfe """ if isinstance(jfe, str): root = json.loads(jfe) else: root = jfe return cast(ast.Node, _parse_node(root)) pygeofilter-0.3.1/pygeofilter/parsers/wkt.lark000066400000000000000000000052561473475122500215560ustar00rootroot00000000000000// ------------------------------------------------------------------------------ // // Project: pygeofilter // Authors: Fabian Schindler // // ------------------------------------------------------------------------------ // Copyright (C) 2021 EOX IT Services GmbH // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal // in the Software without restriction, including without limitation the rights // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell // copies of the Software, and to permit persons to whom the Software is // furnished to do so, subject to the following conditions: // // The above copyright notice and this permission notice shall be included in // all copies of this Software or works derived from this Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN // THE SOFTWARE. // ------------------------------------------------------------------------------ ?ewkt_geometry: "SRID" "=" INT ";" geometry -> geometry_with_srid | geometry ?geometry: point | linestring | polygon | multipoint | multilinestring | multipolygon | geometrycollection geometrycollection: "GEOMETRYCOLLECTION" "(" geometry ( "," geometry )* ")" point: "POINT" "(" coordinate ")" linestring: "LINESTRING" "(" coordinate_list ")" polygon: "POLYGON" "(" coordinate_lists ")" multipoint: "MULTIPOINT" "(" coordinate_list ")" -> multipoint | "MULTIPOINT" "(" "(" coordinate ")" ( "," "(" coordinate ")" )* ")" -> multipoint_2 multilinestring: "MULTILINESTRING" "(" coordinate_lists ")" multipolygon: "MULTIPOLYGON" "(" "(" coordinate_lists ")" ( "," "(" coordinate_lists ")" )* ")" coordinate_lists: "(" coordinate_list ")" ( "," "(" coordinate_list ")" )* ?coordinate_list: coordinate_list "," coordinate | coordinate -> coordinate_list_start coordinate: SIGNED_NUMBER SIGNED_NUMBER [ SIGNED_NUMBER [ SIGNED_NUMBER ] ] // NUMBER: /-?\d+\.?\d+/ %import common.NUMBER %import common.SIGNED_NUMBER %import common.INT pygeofilter-0.3.1/pygeofilter/parsers/wkt.py000066400000000000000000000067311473475122500212540ustar00rootroot00000000000000# ------------------------------------------------------------------------------ # # Project: pygeofilter # Authors: Fabian Schindler # # ------------------------------------------------------------------------------ # Copyright (C) 2021 EOX IT Services GmbH # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the 
rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies of this Software or works derived from this Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. # ------------------------------------------------------------------------------ from lark import Transformer, v_args @v_args(meta=False, inline=True) class WKTTransformer(Transformer): def wkt__geometry_with_srid(self, srid, geometry): print(srid, geometry) geometry["crs"] = { "type": "name", "properties": {"name": f"urn:ogc:def:crs:EPSG::{srid}"}, } return geometry def wkt__geometrycollection(self, *geometries): return {"type": "GeometryCollection", "geometries": geometries} def wkt__point(self, coordinates): return { "type": "Point", "coordinates": coordinates, } def wkt__linestring(self, coordinate_list): return { "type": "LineString", "coordinates": coordinate_list, } def wkt__polygon(self, coordinate_lists): return { "type": "Polygon", "coordinates": coordinate_lists, } def wkt__multipoint(self, coordinates): return { "type": "MultiPoint", "coordinates": coordinates, } def wkt__multipoint_2(self, *coordinates): print(coordinates) return { "type": "MultiPoint", "coordinates": coordinates, } def wkt__multilinestring(self, coordinate_lists): return { "type": "MultiLineString", "coordinates": coordinate_lists, } def wkt__multipolygon(self, 
*coordinate_lists): return { "type": "MultiPolygon", "coordinates": coordinate_lists, } def wkt__coordinate_lists(self, *coordinate_lists): return coordinate_lists def wkt__coordinate_list(self, coordinate_list, coordinate): return coordinate_list + (coordinate,) def wkt__coordinate_list_start(self, coordinate_list): return (coordinate_list,) def wkt__coordinate(self, *components): return components def wkt__SIGNED_NUMBER(self, value): return float(value) def wkt__NUMBER(self, value): return float(value) pygeofilter-0.3.1/pygeofilter/util.py000066400000000000000000000114001473475122500177320ustar00rootroot00000000000000# ------------------------------------------------------------------------------ # # Project: pygeofilter # Authors: Fabian Schindler # # ------------------------------------------------------------------------------ # Copyright (C) 2019 EOX IT Services GmbH # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies of this Software or works derived from this Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. 
# ------------------------------------------------------------------------------ import re from collections.abc import Mapping from datetime import date, datetime, timedelta from dateparser import parse as _parse_datetime __all__ = [ "parse_datetime", "RE_ISO_8601", "parse_duration", "like_pattern_to_re_pattern", "like_pattern_to_re", ] RE_ISO_8601 = re.compile( r"^(?P[+-])?P" r"(?:(?P\d+(\.\d+)?)Y)?" r"(?:(?P\d+(\.\d+)?)M)?" r"(?:(?P\d+(\.\d+)?)D)?" r"T?(?:(?P\d+(\.\d+)?)H)?" r"(?:(?P\d+(\.\d+)?)M)?" r"(?:(?P\d+(\.\d+)?)S)?$" ) def parse_duration(value: str) -> timedelta: """Parses an ISO 8601 duration string into a python timedelta object. Raises a ``ValueError`` if a conversion was not possible. :param value: the ISO8601 duration string to parse :type value: str :return: the parsed duration :rtype: datetime.timedelta """ match = RE_ISO_8601.match(value) if not match: raise ValueError("Could not parse ISO 8601 duration from '%s'." % value) parts = match.groupdict() sign = -1 if "-" == parts["sign"] else 1 days = float(parts["days"] or 0) days += float(parts["months"] or 0) * 30 # ?! days += float(parts["years"] or 0) * 365 # ?! 
fsec = float(parts["seconds"] or 0) fsec += float(parts["minutes"] or 0) * 60 fsec += float(parts["hours"] or 0) * 3600 return sign * timedelta(days, fsec) def parse_date(value: str) -> date: """Backport for `fromisoformat` for dates in Python 3.6""" return date(*(int(part) for part in value.split("-"))) def parse_datetime(value: str) -> datetime: parsed = _parse_datetime(value) if parsed is None: raise ValueError(value) return parsed def like_pattern_to_re_pattern(like, wildcard, single_char, escape_char): x_wildcard = re.escape(wildcard) x_single_char = re.escape(single_char) dx_wildcard = re.escape(x_wildcard) dx_single_char = re.escape(x_single_char) # special handling if escape char clashes with re escape char if escape_char == "\\": x_escape_char = "\\\\\\\\" else: x_escape_char = re.escape(escape_char) dx_escape_char = re.escape(x_escape_char) pattern = re.escape(like) # handle not escaped wildcards/single chars pattern = re.sub( f"(? int: return 0 pygeofilter-0.3.1/pygeofilter/values.py000066400000000000000000000061151473475122500202630ustar00rootroot00000000000000# ------------------------------------------------------------------------------ # # Project: pygeofilter # Authors: Fabian Schindler # # ------------------------------------------------------------------------------ # Copyright (C) 2019 EOX IT Services GmbH # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies of this Software or works derived from this Software. 
# # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. # ------------------------------------------------------------------------------ from dataclasses import dataclass from datetime import date, datetime, time, timedelta from typing import Any, List, Optional, Union from pygeoif import shape @dataclass class Geometry: geometry: dict @property def __geo_interface__(self): return self.geometry def __eq__(self, o: object) -> bool: return shape(self).__geo_interface__ == shape(o).__geo_interface__ @dataclass class Envelope: x1: float x2: float y1: float y2: float @property def geometry(self): return { "type": "Polygon", "coordinates": [ [ [self.x1, self.y1], [self.x1, self.y2], [self.x2, self.y2], [self.x2, self.y1], [self.x1, self.y1], ] ], } @property def __geo_interface__(self): return self.geometry def __eq__(self, o: object) -> bool: return shape(self).__geo_interface__ == shape(o).__geo_interface__ @dataclass class Interval: start: Optional[Union[date, datetime, timedelta]] = None end: Optional[Union[date, datetime, timedelta]] = None def get_sub_nodes(self) -> List[Any]: # TODO: find way to type this return [self.start, self.end] # used for handler declaration LITERALS = (list, str, float, int, bool, datetime, date, time, timedelta) # used for type checking SpatialValueType = Union[Geometry, Envelope] TemporalValueType = Union[date, datetime, timedelta, Interval] ValueType = Union[ SpatialValueType, TemporalValueType, bool, float, int, str, ] 
pygeofilter-0.3.1/pygeofilter/version.py000066400000000000000000000000261473475122500204440ustar00rootroot00000000000000__version__ = "0.3.1" pygeofilter-0.3.1/pyproject.toml000066400000000000000000000000701473475122500167670ustar00rootroot00000000000000[build-system] requires = ["setuptools>=46.4", "wheel"] pygeofilter-0.3.1/requirements-dev.txt000066400000000000000000000001001473475122500201050ustar00rootroot00000000000000flake8 pytest pytest-django wheel mypy<=1.10.0 types-dateparser pygeofilter-0.3.1/requirements-test.txt000066400000000000000000000002161473475122500203160ustar00rootroot00000000000000django geoalchemy2 sqlalchemy geopandas fiona pyproj rtree pygml dateparser lark elasticsearch elasticsearch-dsl opensearch-py opensearch-dsl pygeofilter-0.3.1/setup.cfg000066400000000000000000000006071473475122500157020ustar00rootroot00000000000000[metadata] version = attr: pygeofilter.version.__version__ ###################################################### # code formating / lint / type checking configurations [isort] profile = black default_section = THIRDPARTY [flake8] ignore = E501,W503,E203 exclude = .git,__pycache__,docs/conf.py,old,build,dist max-complexity = 12 max-line-length = 80 [mypy] ignore_missing_imports = True pygeofilter-0.3.1/setup.py000066400000000000000000000063131473475122500155730ustar00rootroot00000000000000# ------------------------------------------------------------------------------ # # Project: pygeofilter # Authors: Fabian Schindler # # ------------------------------------------------------------------------------ # Copyright (C) 2019 EOX IT Services GmbH # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software 
is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies of this Software or works derived from this Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. # ------------------------------------------------------------------------------ """Install pygeofilter.""" import os import os.path from setuptools import find_packages, setup # don't install dependencies when building win readthedocs on_rtd = os.environ.get("READTHEDOCS") == "True" # use README.md for project long_description with open("README.md") as f: readme = f.read() description = ( "pygeofilter is a pure Python parser implementation of OGC filtering standards" ) setup( name="pygeofilter", description=description, long_description=readme, long_description_content_type="text/markdown", author="Fabian Schindler", author_email="fabian.schindler@eox.at", url="https://github.com/geopython/pygeofilter", license="MIT", packages=find_packages(), include_package_data=True, install_requires=( [ "dateparser", "lark", "pygeoif>=1.0.0", "dataclasses;python_version<'3.7'", ] if not on_rtd else [] ), extras_require={ "backend-django": ["django"], "backend-sqlalchemy": ["geoalchemy2", "sqlalchemy"], "backend-native": ["shapely"], "backend-elasticsearch": ["elasticsearch", "elasticsearch-dsl"], "backend-opensearch": ["opensearch-py", "opensearch-dsl"], "fes": ["pygml>=0.2"], }, classifiers=[ "Development Status :: 3 - Alpha", "Intended Audience :: Developers", "Topic :: Scientific/Engineering :: GIS", 
"License :: OSI Approved :: MIT License", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", ], tests_require=["pytest"], ) pygeofilter-0.3.1/tests/000077500000000000000000000000001473475122500152205ustar00rootroot00000000000000pygeofilter-0.3.1/tests/backends/000077500000000000000000000000001473475122500167725ustar00rootroot00000000000000pygeofilter-0.3.1/tests/backends/__init__.py000066400000000000000000000000001473475122500210710ustar00rootroot00000000000000pygeofilter-0.3.1/tests/backends/django/000077500000000000000000000000001473475122500202345ustar00rootroot00000000000000pygeofilter-0.3.1/tests/backends/django/conftest.py000066400000000000000000000021161473475122500224330ustar00rootroot00000000000000import django import pytest from django.conf import settings from django.core.management import call_command def pytest_configure(): settings.configure( SECRET_KEY="secret", INSTALLED_APPS=[ "django.contrib.admin", "django.contrib.auth", "django.contrib.contenttypes", "django.contrib.sessions", "django.contrib.messages", "django.contrib.staticfiles", "django.contrib.gis", "testapp", ], DATABASES={ "default": { "ENGINE": "django.contrib.gis.db.backends.spatialite", "NAME": "db.sqlite", "TEST": { "NAME": ":memory:", }, } }, LANGUAGE_CODE="en-us", TIME_ZONE="UTC", USE_I18N=True, USE_L10N=True, USE_TZ=True, ) django.setup() @pytest.fixture(scope="session") def django_db_setup(django_db_setup, django_db_blocker): with django_db_blocker.unblock(): call_command("loaddata", "test.json") pygeofilter-0.3.1/tests/backends/django/test_django_evaluate.py000066400000000000000000000260641473475122500250050ustar00rootroot00000000000000# ------------------------------------------------------------------------------ # # Project: pygeofilter # Authors: Fabian Schindler # # 
------------------------------------------------------------------------------ # Copyright (C) 2019 EOX IT Services GmbH # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies of this Software or works derived from this Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. 
# ------------------------------------------------------------------------------ import pytest from testapp import models from pygeofilter.backends.django.evaluate import to_filter from pygeofilter.parsers.ecql import parse def evaluate(cql_expr, expected_ids, model_type=None): model_type = model_type or models.Record mapping = models.FIELD_MAPPING mapping_choices = models.MAPPING_CHOICES ast = parse(cql_expr) filters = to_filter(ast, mapping, mapping_choices) qs = model_type.objects.filter(filters) assert expected_ids == type(expected_ids)(qs.values_list("identifier", flat=True)) # common comparisons @pytest.mark.django_db def test_id_eq(): evaluate("identifier = 'A'", ("A",)) @pytest.mark.django_db def test_id_eq_2(): evaluate("'A' = identifier", ("A",)) @pytest.mark.django_db def test_id_ne(): evaluate("identifier <> 'B'", ("A",)) @pytest.mark.django_db def test_float_lt(): evaluate("floatAttribute < 30", ("A",)) @pytest.mark.django_db def test_float_le(): evaluate("floatAttribute <= 20", ("A",)) @pytest.mark.django_db def test_float_le_inv(): evaluate("20 >= floatAttribute", ("A",)) @pytest.mark.django_db def test_float_gt(): evaluate("floatAttribute > 20", ("B",)) @pytest.mark.django_db def test_float_gt_2(): evaluate("20 < floatAttribute", ("B",)) @pytest.mark.django_db def test_float_ge(): evaluate("floatAttribute >= 30", ("B",)) @pytest.mark.django_db def test_float_ge_inv(): evaluate("30 <= floatAttribute", ("B",)) @pytest.mark.django_db def test_float_between(): evaluate("floatAttribute BETWEEN -1 AND 1", ("A",)) # test different field types @pytest.mark.django_db def test_common_value_eq(): evaluate("strAttribute = 'AAA'", ("A",)) @pytest.mark.django_db def test_common_value_eq_inv(): evaluate("'AAA' = strAttribute", ("A",)) @pytest.mark.django_db def test_common_value_in(): evaluate("strAttribute IN ('AAA', 'XXX')", ("A",)) @pytest.mark.django_db def test_common_value_like(): evaluate("strAttribute LIKE 'AA%'", ("A",)) @pytest.mark.django_db def 
test_common_value_like_middle(): evaluate("strAttribute LIKE 'A%A'", ("A",)) # TODO: resolve from choice? # def test_enum_value_eq(): # evaluate( # 'choiceAttribute = "A"', # ('A',) # ) # def test_enum_value_in(): # evaluate( # 'choiceAttribute IN ("ASCENDING")', # ('A',) # ) # def test_enum_value_like(): # evaluate( # 'choiceAttribute LIKE "ASCEN%"', # ('A',) # ) # def test_enum_value_ilike(): # evaluate( # 'choiceAttribute ILIKE "ascen%"', # ('A',) # ) # def test_enum_value_ilike_start_middle_end(): # evaluate( # r'choiceAttribute ILIKE "a%en%ing"', # ('A',) # ) # (NOT) LIKE | ILIKE @pytest.mark.django_db def test_like_beginswith(): evaluate("strMetaAttribute LIKE 'A%'", ("A",)) @pytest.mark.django_db def test_ilike_beginswith(): evaluate("strMetaAttribute ILIKE 'a%'", ("A",)) @pytest.mark.django_db def test_like_endswith(): evaluate("strMetaAttribute LIKE '%A'", ("A",)) @pytest.mark.django_db def test_ilike_endswith(): evaluate("strMetaAttribute ILIKE '%a'", ("A",)) @pytest.mark.django_db def test_like_middle(): evaluate("strMetaAttribute LIKE '%parent%'", ("A", "B")) @pytest.mark.django_db def test_like_startswith_middle(): evaluate("strMetaAttribute LIKE 'A%rent%'", ("A",)) @pytest.mark.django_db def test_like_middle_endswith(): evaluate("strMetaAttribute LIKE '%ren%A'", ("A",)) @pytest.mark.django_db def test_like_startswith_middle_endswith(): evaluate("strMetaAttribute LIKE 'A%ren%A'", ("A",)) @pytest.mark.django_db def test_ilike_middle(): evaluate("strMetaAttribute ILIKE '%PaReNT%'", ("A", "B")) @pytest.mark.django_db def test_not_like_beginswith(): evaluate("strMetaAttribute NOT LIKE 'B%'", ("A",)) @pytest.mark.django_db def test_not_ilike_beginswith(): evaluate("strMetaAttribute NOT ILIKE 'b%'", ("A",)) @pytest.mark.django_db def test_not_like_endswith(): evaluate("strMetaAttribute NOT LIKE '%B'", ("A",)) @pytest.mark.django_db def test_not_ilike_endswith(): evaluate("strMetaAttribute NOT ILIKE '%b'", ("A",)) # (NOT) IN @pytest.mark.django_db def 
test_string_in(): evaluate("identifier IN ('A', 'B')", ("A", "B")) @pytest.mark.django_db def test_string_not_in(): evaluate("identifier NOT IN ('B', 'C')", ("A",)) # (NOT) NULL @pytest.mark.django_db def test_string_null(): evaluate("intAttribute IS NULL", ("B",)) @pytest.mark.django_db def test_string_not_null(): evaluate("intAttribute IS NOT NULL", ("A",)) # temporal predicates @pytest.mark.django_db def test_before(): evaluate("datetimeAttribute BEFORE 2000-01-01T00:00:01Z", ("A",)) @pytest.mark.django_db def test_before_or_during_dt_dt(): evaluate( "datetimeAttribute BEFORE OR DURING " "2000-01-01T00:00:00Z / 2000-01-01T00:00:01Z", ("A",), ) @pytest.mark.django_db def test_before_or_during_dt_td(): evaluate( "datetimeAttribute BEFORE OR DURING " "2000-01-01T00:00:00Z / PT4S", ("A",) ) @pytest.mark.django_db def test_before_or_during_td_dt(): evaluate( "datetimeAttribute BEFORE OR DURING " "PT4S / 2000-01-01T00:00:03Z", ("A",) ) @pytest.mark.django_db def test_during_td_dt(): evaluate( "datetimeAttribute BEFORE OR DURING " "PT4S / 2000-01-01T00:00:03Z", ("A",) ) # TODO: test DURING OR AFTER / AFTER # spatial predicates @pytest.mark.django_db def test_intersects_point(): evaluate("INTERSECTS(geometry, POINT(1 1.0))", ("A",)) @pytest.mark.django_db def test_intersects_point_inv(): evaluate("INTERSECTS(POINT(1 1.0), geometry)", ("A",)) @pytest.mark.django_db def test_intersects_mulitipoint_1(): evaluate("INTERSECTS(geometry, MULTIPOINT(0 0, 1 1))", ("A",)) @pytest.mark.django_db def test_intersects_mulitipoint_1_inv(): evaluate("INTERSECTS(MULTIPOINT(0 0, 1 1), geometry)", ("A",)) @pytest.mark.django_db def test_intersects_mulitipoint_2(): evaluate("INTERSECTS(geometry, MULTIPOINT((0 0), (1 1)))", ("A",)) @pytest.mark.django_db def test_intersects_mulitipoint_2_inv(): evaluate("INTERSECTS(MULTIPOINT((0 0), (1 1)), geometry)", ("A",)) @pytest.mark.django_db def test_intersects_linestring(): evaluate("INTERSECTS(geometry, LINESTRING(0 0, 1 1))", ("A",)) 
@pytest.mark.django_db def test_intersects_linestring__inv(): evaluate("INTERSECTS(LINESTRING(0 0, 1 1), geometry)", ("A",)) @pytest.mark.django_db def test_intersects_multilinestring(): evaluate("INTERSECTS(geometry, MULTILINESTRING((0 0, 1 1), (2 1, 1 2)))", ("A",)) @pytest.mark.django_db def test_intersects_multilinestring_inv(): evaluate("INTERSECTS(MULTILINESTRING((0 0, 1 1), (2 1, 1 2)), geometry)", ("A",)) @pytest.mark.django_db def test_intersects_polygon(): evaluate( "INTERSECTS(geometry, " "POLYGON((0 0, 3 0, 3 3, 0 3, 0 0), (1 1, 2 1, 2 2, 1 2, 1 1)))", ("A",), ) @pytest.mark.django_db def test_intersects_polygon_inv(): evaluate( "INTERSECTS(" "POLYGON((0 0, 3 0, 3 3, 0 3, 0 0), (1 1, 2 1, 2 2, 1 2, 1 1)), " "geometry)", ("A",), ) @pytest.mark.django_db def test_intersects_multipolygon(): evaluate( "INTERSECTS(geometry, " "MULTIPOLYGON(((0 0, 3 0, 3 3, 0 3, 0 0), " "(1 1, 2 1, 2 2, 1 2, 1 1))))", ("A",), ) @pytest.mark.django_db def test_intersects_multipolygon_inv(): evaluate( "INTERSECTS(" "MULTIPOLYGON(((0 0, 3 0, 3 3, 0 3, 0 0), " "(1 1, 2 1, 2 2, 1 2, 1 1))), " "geometry)", ("A",), ) @pytest.mark.django_db def test_intersects_envelope(): evaluate("INTERSECTS(geometry, ENVELOPE(0 1.0 0 1.0))", ("A",)) @pytest.mark.django_db def test_intersects_envelope_inv(): evaluate("INTERSECTS(ENVELOPE(0 1.0 0 1.0), geometry)", ("A",)) @pytest.mark.django_db def test_dwithin(): evaluate("DWITHIN(geometry, POINT(0 0), 10, meters)", ("A",)) @pytest.mark.django_db def test_dwithin_inv(): evaluate("DWITHIN(POINT(0 0), geometry, 10, meters)", ("A",)) @pytest.mark.django_db def test_beyond(): evaluate("BEYOND(geometry, POINT(0 0), 10, meters)", ("B",)) @pytest.mark.django_db def test_beyond_inv(): evaluate("BEYOND(POINT(0 0), geometry, 10, meters)", ("B",)) @pytest.mark.django_db def test_bbox(): evaluate("BBOX(geometry, 0, 0, 1, 1, 'EPSG:4326')", ("A",)) # TODO: other relation methods # arithmethic expressions @pytest.mark.django_db def test_arith_simple_plus(): 
evaluate("intMetaAttribute = 10 + 10", ("A",)) @pytest.mark.django_db def test_arith_simple_plus_inv(): evaluate("10 + 10 = intMetaAttribute", ("A",)) @pytest.mark.django_db def test_arith_field_plus_1(): evaluate("intMetaAttribute = floatMetaAttribute + 10", ("A", "B")) @pytest.mark.django_db def test_arith_field_plus_1_inv(): evaluate("floatMetaAttribute + 10 = intMetaAttribute", ("A", "B")) @pytest.mark.django_db def test_arith_field_plus_2(): evaluate("intMetaAttribute = 10 + floatMetaAttribute", ("A", "B")) @pytest.mark.django_db def test_arith_field_plus_2_inv(): evaluate("10 + floatMetaAttribute = intMetaAttribute", ("A", "B")) @pytest.mark.django_db def test_arith_field_plus_field(): evaluate("intMetaAttribute = " "floatMetaAttribute + intAttribute", ("A",)) @pytest.mark.django_db def test_arith_field_plus_field_inv(): evaluate("floatMetaAttribute + intAttribute" "= intMetaAttribute", ("A",)) @pytest.mark.django_db def test_arith_field_plus_mul_1(): evaluate("intMetaAttribute = intAttribute * 1.5 + 5", ("A",)) @pytest.mark.django_db def test_arith_field_plus_mul_2(): evaluate("intMetaAttribute = 5 + intAttribute * 1.5", ("A",)) pygeofilter-0.3.1/tests/backends/django/testapp/000077500000000000000000000000001473475122500217145ustar00rootroot00000000000000pygeofilter-0.3.1/tests/backends/django/testapp/__init__.py000066400000000000000000000000001473475122500240130ustar00rootroot00000000000000pygeofilter-0.3.1/tests/backends/django/testapp/admin.py000066400000000000000000000027451473475122500233660ustar00rootroot00000000000000# ------------------------------------------------------------------------------ # # Project: pygeofilter # Authors: Fabian Schindler # # ------------------------------------------------------------------------------ # Copyright (C) 2019 EOX IT Services GmbH # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software 
without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies of this Software or works derived from this Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. # ------------------------------------------------------------------------------ # Register your models here. pygeofilter-0.3.1/tests/backends/django/testapp/apps.py000066400000000000000000000030411473475122500232270ustar00rootroot00000000000000# ------------------------------------------------------------------------------ # # Project: pygeofilter # Authors: Fabian Schindler # # ------------------------------------------------------------------------------ # Copyright (C) 2019 EOX IT Services GmbH # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies of this Software or works derived from this Software. 
# # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. # ------------------------------------------------------------------------------ from django.apps import AppConfig class TestappConfig(AppConfig): name = "testapp" pygeofilter-0.3.1/tests/backends/django/testapp/fixtures/000077500000000000000000000000001473475122500235655ustar00rootroot00000000000000pygeofilter-0.3.1/tests/backends/django/testapp/fixtures/test.json000066400000000000000000000024501473475122500254400ustar00rootroot00000000000000[ { "model": "testapp.record", "pk": 1, "fields": { "identifier": "A", "geometry": "SRID=4326;MULTIPOLYGON (((0 0, 0 5, 5 5, 5 0, 0 0)))", "float_attribute": 0.0, "int_attribute": 10, "str_attribute": "AAA", "datetime_attribute": "2000-01-01T00:00:00Z", "choice_attribute": 1 } }, { "model": "testapp.record", "pk": 2, "fields": { "identifier": "B", "geometry": "SRID=4326;MULTIPOLYGON (((5 5, 5 10, 10 10, 10 5, 5 5)))", "float_attribute": 30.0, "int_attribute": null, "str_attribute": "BBB", "datetime_attribute": "2000-01-01T00:00:05Z", "choice_attribute": 2 } }, { "model": "testapp.recordmeta", "pk": 1, "fields": { "record": 1, "float_meta_attribute": 10.0, "int_meta_attribute": 20, "str_meta_attribute": "AparentA", "datetime_meta_attribute": "2000-01-01T00:00:05Z", "choice_meta_attribute": 1 } }, { "model": "testapp.recordmeta", "pk": 2, "fields": { "record": 2, "float_meta_attribute": 20.0, "int_meta_attribute": 30, "str_meta_attribute": "BparentB", "datetime_meta_attribute": "2000-01-01T00:00:10Z", "choice_meta_attribute": 2 } } ] 
pygeofilter-0.3.1/tests/backends/django/testapp/migrations/000077500000000000000000000000001473475122500240705ustar00rootroot00000000000000pygeofilter-0.3.1/tests/backends/django/testapp/migrations/0001_initial.py000066400000000000000000000056341473475122500265430ustar00rootroot00000000000000# flake8: noqa # Generated by Django 2.2.5 on 2019-09-09 07:18 import django.contrib.gis.db.models.fields import django.db.models.deletion from django.db import migrations, models class Migration(migrations.Migration): initial = True dependencies = [] operations = [ migrations.CreateModel( name="Record", fields=[ ( "id", models.AutoField( auto_created=True, primary_key=True, serialize=False, verbose_name="ID", ), ), ("identifier", models.CharField(max_length=256, unique=True)), ( "geometry", django.contrib.gis.db.models.fields.GeometryField(srid=4326), ), ("float_attribute", models.FloatField(blank=True, null=True)), ("int_attribute", models.IntegerField(blank=True, null=True)), ( "str_attribute", models.CharField(blank=True, max_length=256, null=True), ), ("datetime_attribute", models.DateTimeField(blank=True, null=True)), ( "choice_attribute", models.PositiveSmallIntegerField( blank=True, choices=[(1, "A"), (2, "B"), (3, "C")], null=True ), ), ], ), migrations.CreateModel( name="RecordMeta", fields=[ ( "id", models.AutoField( auto_created=True, primary_key=True, serialize=False, verbose_name="ID", ), ), ("float_meta_attribute", models.FloatField(blank=True, null=True)), ("int_meta_attribute", models.IntegerField(blank=True, null=True)), ( "str_meta_attribute", models.CharField(blank=True, max_length=256, null=True), ), ( "datetime_meta_attribute", models.DateTimeField(blank=True, null=True), ), ( "choice_meta_attribute", models.PositiveSmallIntegerField( blank=True, choices=[(1, "X"), (2, "Y"), (3, "Z")], null=True ), ), ( "record", models.ForeignKey( on_delete=django.db.models.deletion.CASCADE, related_name="record_metas", to="testapp.Record", ), ), ], ), ] 
pygeofilter-0.3.1/tests/backends/django/testapp/migrations/__init__.py000066400000000000000000000000001473475122500261670ustar00rootroot00000000000000pygeofilter-0.3.1/tests/backends/django/testapp/models.py000066400000000000000000000066221473475122500235570ustar00rootroot00000000000000# ------------------------------------------------------------------------------ # # Project: pygeofilter # Authors: Fabian Schindler # # ------------------------------------------------------------------------------ # Copyright (C) 2019 EOX IT Services GmbH # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies of this Software or works derived from this Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. 
# ------------------------------------------------------------------------------ # flake8: noqa from django.contrib.gis.db import models optional = dict(null=True, blank=True) class Record(models.Model): identifier = models.CharField(max_length=256, unique=True, null=False) geometry = models.GeometryField() float_attribute = models.FloatField(**optional) int_attribute = models.IntegerField(**optional) str_attribute = models.CharField(max_length=256, **optional) datetime_attribute = models.DateTimeField(**optional) choice_attribute = models.PositiveSmallIntegerField( choices=[ (1, "ASCENDING"), (2, "DESCENDING"), ], **optional ) class RecordMeta(models.Model): record = models.ForeignKey( Record, on_delete=models.CASCADE, related_name="record_metas" ) float_meta_attribute = models.FloatField(**optional) int_meta_attribute = models.IntegerField(**optional) str_meta_attribute = models.CharField(max_length=256, **optional) datetime_meta_attribute = models.DateTimeField(**optional) choice_meta_attribute = models.PositiveSmallIntegerField( choices=[(1, "X"), (2, "Y"), (3, "Z")], **optional ) FIELD_MAPPING = { "identifier": "identifier", "geometry": "geometry", "floatAttribute": "float_attribute", "intAttribute": "int_attribute", "strAttribute": "str_attribute", "datetimeAttribute": "datetime_attribute", "choiceAttribute": "choice_attribute", # meta fields "floatMetaAttribute": "record_metas__float_meta_attribute", "intMetaAttribute": "record_metas__int_meta_attribute", "strMetaAttribute": "record_metas__str_meta_attribute", "datetimeMetaAttribute": "record_metas__datetime_meta_attribute", "choiceMetaAttribute": "record_metas__choice_meta_attribute", } MAPPING_CHOICES = { "choiceAttribute": dict(Record._meta.get_field("choice_attribute").choices), "choiceMetaAttribute": dict( RecordMeta._meta.get_field("choice_meta_attribute").choices ), } 
pygeofilter-0.3.1/tests/backends/django/testapp/tests.py000066400000000000000000000213621473475122500234340ustar00rootroot00000000000000# ------------------------------------------------------------------------------ # # Project: pygeofilter # Authors: Fabian Schindler # # ------------------------------------------------------------------------------ # Copyright (C) 2019 EOX IT Services GmbH # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies of this Software or works derived from this Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. # ------------------------------------------------------------------------------ from django.test import TransactionTestCase from pygeofilter.backends.django.evaluate import to_filter from pygeofilter.parsers.ecql import parse from . 
import models class CQLTestCase(TransactionTestCase): fixtures = ["test.json"] def evaluate(self, cql_expr, expected_ids, model_type=None): model_type = model_type or models.Record mapping = models.FIELD_MAPPING mapping_choices = models.MAPPING_CHOICES ast = parse(cql_expr) filters = to_filter(ast, mapping, mapping_choices) qs = model_type.objects.filter(filters) self.assertEqual( expected_ids, type(expected_ids)(qs.values_list("identifier", flat=True)) ) # common comparisons def test_id_eq(self): self.evaluate("identifier = 'A'", ("A",)) def test_id_ne(self): self.evaluate("identifier <> 'B'", ("A",)) def test_float_lt(self): self.evaluate("floatAttribute < 30", ("A",)) def test_float_le(self): self.evaluate("floatAttribute <= 20", ("A",)) def test_float_gt(self): self.evaluate("floatAttribute > 20", ("B",)) def test_float_ge(self): self.evaluate("floatAttribute >= 30", ("B",)) def test_float_between(self): self.evaluate("floatAttribute BETWEEN -1 AND 1", ("A",)) # test different field types def test_common_value_eq(self): self.evaluate("strAttribute = 'AAA'", ("A",)) def test_common_value_in(self): self.evaluate("strAttribute IN ('AAA', 'XXX')", ("A",)) def test_common_value_like(self): self.evaluate("strAttribute LIKE 'AA%'", ("A",)) def test_common_value_like_middle(self): self.evaluate("strAttribute LIKE 'A%A'", ("A",)) # TODO: resolve from choice? 
# def test_enum_value_eq(self): # self.evaluate( # 'choiceAttribute = "A"', # ('A',) # ) # def test_enum_value_in(self): # self.evaluate( # 'choiceAttribute IN ("ASCENDING")', # ('A',) # ) # def test_enum_value_like(self): # self.evaluate( # 'choiceAttribute LIKE "ASCEN%"', # ('A',) # ) # def test_enum_value_ilike(self): # self.evaluate( # 'choiceAttribute ILIKE "ascen%"', # ('A',) # ) # def test_enum_value_ilike_start_middle_end(self): # self.evaluate( # r'choiceAttribute ILIKE "a%en%ing"', # ('A',) # ) # (NOT) LIKE | ILIKE def test_like_beginswith(self): self.evaluate("strMetaAttribute LIKE 'A%'", ("A",)) def test_ilike_beginswith(self): self.evaluate("strMetaAttribute ILIKE 'a%'", ("A",)) def test_like_endswith(self): self.evaluate("strMetaAttribute LIKE '%A'", ("A",)) def test_ilike_endswith(self): self.evaluate("strMetaAttribute ILIKE '%a'", ("A",)) def test_like_middle(self): self.evaluate("strMetaAttribute LIKE '%parent%'", ("A", "B")) def test_like_startswith_middle(self): self.evaluate("strMetaAttribute LIKE 'A%rent%'", ("A",)) def test_like_middle_endswith(self): self.evaluate("strMetaAttribute LIKE '%ren%A'", ("A",)) def test_like_startswith_middle_endswith(self): self.evaluate("strMetaAttribute LIKE 'A%ren%A'", ("A",)) def test_ilike_middle(self): self.evaluate("strMetaAttribute ILIKE '%PaReNT%'", ("A", "B")) def test_not_like_beginswith(self): self.evaluate("strMetaAttribute NOT LIKE 'B%'", ("A",)) def test_not_ilike_beginswith(self): self.evaluate("strMetaAttribute NOT ILIKE 'b%'", ("A",)) def test_not_like_endswith(self): self.evaluate("strMetaAttribute NOT LIKE '%B'", ("A",)) def test_not_ilike_endswith(self): self.evaluate("strMetaAttribute NOT ILIKE '%b'", ("A",)) # (NOT) IN def test_string_in(self): self.evaluate("identifier IN ('A', 'B')", ("A", "B")) def test_string_not_in(self): self.evaluate("identifier NOT IN ('B', 'C')", ("A",)) # (NOT) NULL def test_string_null(self): self.evaluate("intAttribute IS NULL", ("B",)) def 
test_string_not_null(self): self.evaluate("intAttribute IS NOT NULL", ("A",)) # temporal predicates def test_before(self): self.evaluate("datetimeAttribute BEFORE 2000-01-01T00:00:01Z", ("A",)) def test_before_or_during_dt_dt(self): self.evaluate( "datetimeAttribute BEFORE OR DURING " "2000-01-01T00:00:00Z / 2000-01-01T00:00:01Z", ("A",), ) def test_before_or_during_dt_td(self): self.evaluate( "datetimeAttribute BEFORE OR DURING " "2000-01-01T00:00:00Z / PT4S", ("A",) ) def test_before_or_during_td_dt(self): self.evaluate( "datetimeAttribute BEFORE OR DURING " "PT4S / 2000-01-01T00:00:03Z", ("A",) ) def test_during_td_dt(self): self.evaluate( "datetimeAttribute BEFORE OR DURING " "PT4S / 2000-01-01T00:00:03Z", ("A",) ) # TODO: test DURING OR AFTER / AFTER # spatial predicates def test_intersects_point(self): self.evaluate("INTERSECTS(geometry, POINT(1 1.0))", ("A",)) def test_intersects_mulitipoint_1(self): self.evaluate("INTERSECTS(geometry, MULTIPOINT(0 0, 1 1))", ("A",)) def test_intersects_mulitipoint_2(self): self.evaluate("INTERSECTS(geometry, MULTIPOINT((0 0), (1 1)))", ("A",)) def test_intersects_linestring(self): self.evaluate("INTERSECTS(geometry, LINESTRING(0 0, 1 1))", ("A",)) def test_intersects_multilinestring(self): self.evaluate( "INTERSECTS(geometry, MULTILINESTRING((0 0, 1 1), (2 1, 1 2)))", ("A",) ) def test_intersects_polygon(self): self.evaluate( "INTERSECTS(geometry, " "POLYGON((0 0, 3 0, 3 3, 0 3, 0 0), (1 1, 2 1, 2 2, 1 2, 1 1)))", ("A",), ) def test_intersects_multipolygon(self): self.evaluate( "INTERSECTS(geometry, " "MULTIPOLYGON(((0 0, 3 0, 3 3, 0 3, 0 0), " "(1 1, 2 1, 2 2, 1 2, 1 1))))", ("A",), ) def test_intersects_envelope(self): self.evaluate("INTERSECTS(geometry, ENVELOPE(0 1.0 0 1.0))", ("A",)) def test_dwithin(self): self.evaluate("DWITHIN(geometry, POINT(0 0), 10, meters)", ("A",)) def test_beyond(self): self.evaluate("BEYOND(geometry, POINT(0 0), 10, meters)", ("B",)) def test_bbox(self): self.evaluate("BBOX(geometry, 0, 0, 1, 
1, 'EPSG:4326')", ("A",)) # TODO: other relation methods # arithmethic expressions def test_arith_simple_plus(self): self.evaluate("intMetaAttribute = 10 + 10", ("A",)) def test_arith_field_plus_1(self): self.evaluate("intMetaAttribute = floatMetaAttribute + 10", ("A", "B")) def test_arith_field_plus_2(self): self.evaluate("intMetaAttribute = 10 + floatMetaAttribute", ("A", "B")) def test_arith_field_plus_field(self): self.evaluate("intMetaAttribute = " "floatMetaAttribute + intAttribute", ("A",)) def test_arith_field_plus_mul_1(self): self.evaluate("intMetaAttribute = intAttribute * 1.5 + 5", ("A",)) def test_arith_field_plus_mul_2(self): self.evaluate("intMetaAttribute = 5 + intAttribute * 1.5", ("A",)) pygeofilter-0.3.1/tests/backends/django/testapp/views.py000066400000000000000000000027421473475122500234300ustar00rootroot00000000000000# ------------------------------------------------------------------------------ # # Project: pygeofilter # Authors: Fabian Schindler # # ------------------------------------------------------------------------------ # Copyright (C) 2019 EOX IT Services GmbH # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies of this Software or works derived from this Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. # ------------------------------------------------------------------------------ # Create your views here. pygeofilter-0.3.1/tests/backends/elasticsearch/000077500000000000000000000000001473475122500216045ustar00rootroot00000000000000pygeofilter-0.3.1/tests/backends/elasticsearch/__init__.py000066400000000000000000000000001473475122500237030ustar00rootroot00000000000000pygeofilter-0.3.1/tests/backends/elasticsearch/test_evaluate.py000066400000000000000000000203601473475122500250240ustar00rootroot00000000000000# pylint: disable=W0621,C0114,C0115,C0116 import pytest from elasticsearch_dsl import ( Date, DateRange, Document, Field, Float, GeoPoint, GeoShape, Index, InnerDoc, Integer, Nested, Range, Text, connections, ) from pygeofilter import ast from pygeofilter.backends.elasticsearch import to_filter from pygeofilter.parsers.ecql import parse from pygeofilter.util import parse_datetime class Wildcard(Field): name = "wildcard" class RecordMeta(InnerDoc): float_meta_attribute = Float() int_meta_attribute = Integer() str_meta_attribute = Text() datetime_meta_attribute = Date() class Record(Document): identifier = Text() geometry = GeoShape() center = GeoPoint() float_attribute = Float() int_attribute = Integer() str_attribute = Wildcard() maybe_str_attribute = Text() datetime_attribute = Date() daterange_attribute = DateRange() record_metas = Nested(RecordMeta) class Index: name = "record" @pytest.fixture(autouse=True, scope="session") def connection(): connections.create_connection( hosts=["http://localhost:9200"], ) @pytest.fixture(autouse=True, scope="session") def index(connection): Record.init() index = Index(Record.Index.name) yield index index.delete() @pytest.fixture(autouse=True, scope="session") 
def data(index): """Fixture to add initial data to the search index.""" record_a = Record( identifier="A", geometry="MULTIPOLYGON(((0 0, 0 5, 5 5,5 0,0 0)))", center="POINT(2.5 2.5)", float_attribute=0.0, int_attribute=5, str_attribute="this is a test", maybe_str_attribute=None, datetime_attribute=parse_datetime("2000-01-01T00:00:00Z"), daterange_attribute=Range( gte=parse_datetime("2000-01-01T00:00:00Z"), lte=parse_datetime("2000-01-02T00:00:00Z"), ), ) record_a.save() record_b = Record( identifier="B", geometry="MULTIPOLYGON(((5 5, 5 10, 10 10,10 5,5 5)))", center="POINT(7.5 7.5)", float_attribute=30.0, int_attribute=None, str_attribute="this is another test", maybe_str_attribute="some value", datetime_attribute=parse_datetime("2000-01-01T00:00:10Z"), daterange_attribute=Range( gte=parse_datetime("2000-01-04T00:00:00Z"), lte=parse_datetime("2000-01-05T00:00:00Z"), ), ) record_b.save() index.refresh() yield [record_a, record_b] def filter_(ast_): query = to_filter(ast_, version="8.2") print(query) result = Record.search().query(query).execute() print([r.identifier for r in result]) return result def test_comparison(data): result = filter_(parse("int_attribute = 5")) assert len(result) == 1 and result[0].identifier == data[0].identifier result = filter_(parse("float_attribute < 6")) assert len(result) == 1 and result[0].identifier == data[0].identifier result = filter_(parse("float_attribute > 6")) assert len(result) == 1 and result[0].identifier == data[1].identifier result = filter_(parse("int_attribute <= 5")) assert len(result) == 1 and result[0].identifier == data[0].identifier result = filter_(parse("float_attribute >= 8")) assert len(result) == 1 and result[0].identifier == data[1].identifier result = filter_(parse("float_attribute <> 0.0")) assert len(result) == 1 and result[0].identifier == data[1].identifier def test_combination(data): result = filter_(parse("int_attribute = 5 AND float_attribute < 6.0")) assert len(result) == 1 and result[0].identifier 
is data[0].identifier result = filter_(parse("int_attribute = 6 OR float_attribute < 6.0")) assert len(result) == 1 and result[0].identifier is data[0].identifier def test_between(data): result = filter_(parse("float_attribute BETWEEN -1 AND 1")) assert len(result) == 1 and result[0].identifier is data[0].identifier result = filter_(parse("int_attribute NOT BETWEEN 4 AND 6")) assert len(result) == 1 and result[0].identifier is data[1].identifier def test_like(data): result = filter_(parse("str_attribute LIKE 'this is a test'")) assert len(result) == 1 and result[0].identifier is data[0].identifier result = filter_(parse("str_attribute LIKE 'this is % test'")) assert len(result) == 2 result = filter_(parse("str_attribute NOT LIKE '% another test'")) assert len(result) == 1 and result[0].identifier is data[0].identifier result = filter_(parse("str_attribute NOT LIKE 'this is . test'")) assert len(result) == 1 and result[0].identifier is data[1].identifier result = filter_(parse("str_attribute ILIKE 'THIS IS . 
TEST'")) assert len(result) == 1 and result[0].identifier is data[0].identifier result = filter_(parse("str_attribute ILIKE 'THIS IS % TEST'")) assert len(result) == 2 def test_in(data): result = filter_(parse("int_attribute IN ( 1, 2, 3, 4, 5 )")) assert len(result) == 1 and result[0].identifier is data[0].identifier result = filter_(parse("int_attribute NOT IN ( 1, 2, 3, 4, 5 )")) assert len(result) == 1 and result[0].identifier is data[1].identifier def test_null(data): result = filter_(parse("maybe_str_attribute IS NULL")) assert len(result) == 1 and result[0].identifier is data[0].identifier result = filter_(parse("maybe_str_attribute IS NOT NULL")) assert len(result) == 1 and result[0].identifier is data[1].identifier def test_has_attr(): result = filter_(parse("extra_attr EXISTS")) assert len(result) == 0 result = filter_(parse("extra_attr DOES-NOT-EXIST")) assert len(result) == 2 def test_temporal(data): result = filter_( ast.TimeDisjoint( ast.Attribute("datetime_attribute"), [ parse_datetime("2000-01-01T00:00:05.00Z"), parse_datetime("2000-01-01T00:00:15.00Z"), ], ) ) assert len(result) == 1 and result[0].identifier is data[0].identifier result = filter_( parse("datetime_attribute BEFORE 2000-01-01T00:00:05.00Z"), ) assert len(result) == 1 and result[0].identifier is data[0].identifier result = filter_( parse("datetime_attribute AFTER 2000-01-01T00:00:05.00Z"), ) assert len(result) == 1 and result[0].identifier is data[1].identifier # def test_array(): # result = filter_( # ast.ArrayEquals( # ast.Attribute('array_attr'), # [2, 3], # ), # data # ) # assert len(result) == 1 and result[0] is data[0] # result = filter_( # ast.ArrayContains( # ast.Attribute('array_attr'), # [1, 2, 3, 4], # ), # data # ) # assert len(result) == 1 and result[0] is data[1] # result = filter_( # ast.ArrayContainedBy( # ast.Attribute('array_attr'), # [1, 2, 3, 4], # ), # data # ) # assert len(result) == 1 and result[0] is data[0] # result = filter_( # ast.ArrayOverlaps( # 
ast.Attribute('array_attr'), # [5, 6, 7], # ), # data # ) # assert len(result) == 1 and result[0] is data[1] def test_spatial(data): result = filter_( parse("INTERSECTS(geometry, ENVELOPE (0.0 1.0 0.0 1.0))"), ) assert len(result) == 1 and result[0].identifier is data[0].identifier # TODO: test more spatial queries result = filter_( parse("BBOX(center, 2, 2, 3, 3)"), ) assert len(result) == 1 and result[0].identifier is data[0].identifier # def test_arithmetic(): # result = filter_( # parse('int_attr = float_attr - 0.5'), # data, # ) # assert len(result) == 2 # result = filter_( # parse('int_attr = 5 + 20 / 2 - 10'), # data, # ) # assert len(result) == 1 and result[0] is data[0] # def test_function(): # result = filter_( # parse('sin(float_attr) BETWEEN -0.75 AND -0.70'), # data, # ) # assert len(result) == 1 and result[0] is data[0] # def test_nested(): # result = filter_( # parse('"nested_attr.str_attr" = \'this is a test\''), # data, # ) # assert len(result) == 1 and result[0] is data[0] pygeofilter-0.3.1/tests/backends/elasticsearch/test_util.py000066400000000000000000000003701473475122500241720ustar00rootroot00000000000000from pygeofilter.backends.elasticsearch.util import like_to_wildcard def test_like_to_wildcard(): assert "This ? a test" == like_to_wildcard("This . 
a test", "*", ".") assert "This * a test" == like_to_wildcard("This * a test", "*", ".") pygeofilter-0.3.1/tests/backends/opensearch/000077500000000000000000000000001473475122500211215ustar00rootroot00000000000000pygeofilter-0.3.1/tests/backends/opensearch/__init__.py000066400000000000000000000000001473475122500232200ustar00rootroot00000000000000pygeofilter-0.3.1/tests/backends/opensearch/test_evaluate.py000066400000000000000000000203521473475122500243420ustar00rootroot00000000000000# pylint: disable=W0621,C0114,C0115,C0116 import pytest from opensearch_dsl import ( Date, DateRange, Document, Field, Float, GeoPoint, GeoShape, Index, InnerDoc, Integer, Nested, Range, Text, connections, ) from pygeofilter import ast from pygeofilter.backends.opensearch import to_filter from pygeofilter.parsers.ecql import parse from pygeofilter.util import parse_datetime class Wildcard(Field): name = "wildcard" class RecordMeta(InnerDoc): float_meta_attribute = Float() int_meta_attribute = Integer() str_meta_attribute = Text() datetime_meta_attribute = Date() class Record(Document): identifier = Text() geometry = GeoShape() center = GeoPoint() float_attribute = Float() int_attribute = Integer() str_attribute = Wildcard() maybe_str_attribute = Text() datetime_attribute = Date() daterange_attribute = DateRange() record_metas = Nested(RecordMeta) class Index: name = "record" @pytest.fixture(autouse=True, scope="session") def connection(): connections.create_connection( hosts=["http://localhost:9209"], ) @pytest.fixture(autouse=True, scope="session") def index(connection): Record.init() index = Index(Record.Index.name) yield index index.delete() @pytest.fixture(autouse=True, scope="session") def data(index): """Fixture to add initial data to the search index.""" record_a = Record( identifier="A", geometry="MULTIPOLYGON(((0 0, 0 5, 5 5,5 0,0 0)))", center="POINT(2.5 2.5)", float_attribute=0.0, int_attribute=5, str_attribute="this is a test", maybe_str_attribute=None, 
datetime_attribute=parse_datetime("2000-01-01T00:00:00Z"), daterange_attribute=Range( gte=parse_datetime("2000-01-01T00:00:00Z"), lte=parse_datetime("2000-01-02T00:00:00Z"), ), ) record_a.save() record_b = Record( identifier="B", geometry="MULTIPOLYGON(((5 5, 5 10, 10 10,10 5,5 5)))", center="POINT(7.5 7.5)", float_attribute=30.0, int_attribute=None, str_attribute="this is another test", maybe_str_attribute="some value", datetime_attribute=parse_datetime("2000-01-01T00:00:10Z"), daterange_attribute=Range( gte=parse_datetime("2000-01-04T00:00:00Z"), lte=parse_datetime("2000-01-05T00:00:00Z"), ), ) record_b.save() index.refresh() yield [record_a, record_b] def filter_(ast_): query = to_filter(ast_, version="8.2") print(query) result = Record.search().query(query).execute() print([r.identifier for r in result]) return result def test_comparison(data): result = filter_(parse("int_attribute = 5")) assert len(result) == 1 and result[0].identifier == data[0].identifier result = filter_(parse("float_attribute < 6")) assert len(result) == 1 and result[0].identifier == data[0].identifier result = filter_(parse("float_attribute > 6")) assert len(result) == 1 and result[0].identifier == data[1].identifier result = filter_(parse("int_attribute <= 5")) assert len(result) == 1 and result[0].identifier == data[0].identifier result = filter_(parse("float_attribute >= 8")) assert len(result) == 1 and result[0].identifier == data[1].identifier result = filter_(parse("float_attribute <> 0.0")) assert len(result) == 1 and result[0].identifier == data[1].identifier def test_combination(data): result = filter_(parse("int_attribute = 5 AND float_attribute < 6.0")) assert len(result) == 1 and result[0].identifier == data[0].identifier result = filter_(parse("int_attribute = 6 OR float_attribute < 6.0")) assert len(result) == 1 and result[0].identifier == data[0].identifier def test_between(data): result = filter_(parse("float_attribute BETWEEN -1 AND 1")) assert len(result) == 1 and 
result[0].identifier == data[0].identifier result = filter_(parse("int_attribute NOT BETWEEN 4 AND 6")) assert len(result) == 1 and result[0].identifier == data[1].identifier def test_like(data): result = filter_(parse("str_attribute LIKE 'this is a test'")) assert len(result) == 1 and result[0].identifier == data[0].identifier result = filter_(parse("str_attribute LIKE 'this is % test'")) assert len(result) == 2 result = filter_(parse("str_attribute NOT LIKE '% another test'")) assert len(result) == 1 and result[0].identifier == data[0].identifier result = filter_(parse("str_attribute NOT LIKE 'this is . test'")) assert len(result) == 1 and result[0].identifier == data[1].identifier result = filter_(parse("str_attribute ILIKE 'THIS IS . TEST'")) assert len(result) == 1 and result[0].identifier == data[0].identifier result = filter_(parse("str_attribute ILIKE 'THIS IS % TEST'")) assert len(result) == 2 def test_in(data): result = filter_(parse("int_attribute IN ( 1, 2, 3, 4, 5 )")) assert len(result) == 1 and result[0].identifier == data[0].identifier result = filter_(parse("int_attribute NOT IN ( 1, 2, 3, 4, 5 )")) assert len(result) == 1 and result[0].identifier == data[1].identifier def test_null(data): result = filter_(parse("maybe_str_attribute IS NULL")) assert len(result) == 1 and result[0].identifier == data[0].identifier result = filter_(parse("maybe_str_attribute IS NOT NULL")) assert len(result) == 1 and result[0].identifier == data[1].identifier def test_has_attr(): result = filter_(parse("extra_attr EXISTS")) assert len(result) == 0 result = filter_(parse("extra_attr DOES-NOT-EXIST")) assert len(result) == 2 def test_temporal(data): result = filter_( ast.TimeDisjoint( ast.Attribute("datetime_attribute"), [ parse_datetime("2000-01-01T00:00:05.00Z"), parse_datetime("2000-01-01T00:00:15.00Z"), ], ) ) assert len(result) == 1 and result[0].identifier == data[0].identifier result = filter_( parse("datetime_attribute BEFORE 2000-01-01T00:00:05.00Z"), ) assert 
len(result) == 1 and result[0].identifier == data[0].identifier result = filter_( parse("datetime_attribute AFTER 2000-01-01T00:00:05.00Z"), ) assert len(result) == 1 and result[0].identifier == data[1].identifier # def test_array(): # result = filter_( # ast.ArrayEquals( # ast.Attribute('array_attr'), # [2, 3], # ), # data # ) # assert len(result) == 1 and result[0] is data[0] # result = filter_( # ast.ArrayContains( # ast.Attribute('array_attr'), # [1, 2, 3, 4], # ), # data # ) # assert len(result) == 1 and result[0] is data[1] # result = filter_( # ast.ArrayContainedBy( # ast.Attribute('array_attr'), # [1, 2, 3, 4], # ), # data # ) # assert len(result) == 1 and result[0] is data[0] # result = filter_( # ast.ArrayOverlaps( # ast.Attribute('array_attr'), # [5, 6, 7], # ), # data # ) # assert len(result) == 1 and result[0] is data[1] def test_spatial(data): result = filter_( parse("INTERSECTS(geometry, ENVELOPE (0.0 1.0 0.0 1.0))"), ) assert len(result) == 1 and result[0].identifier == data[0].identifier # TODO: test more spatial queries result = filter_( parse("BBOX(center, 2, 2, 3, 3)"), ) assert len(result) == 1 and result[0].identifier == data[0].identifier # def test_arithmetic(): # result = filter_( # parse('int_attr = float_attr - 0.5'), # data, # ) # assert len(result) == 2 # result = filter_( # parse('int_attr = 5 + 20 / 2 - 10'), # data, # ) # assert len(result) == 1 and result[0] is data[0] # def test_function(): # result = filter_( # parse('sin(float_attr) BETWEEN -0.75 AND -0.70'), # data, # ) # assert len(result) == 1 and result[0] is data[0] # def test_nested(): # result = filter_( # parse('"nested_attr.str_attr" = \'this is a test\''), # data, # ) # assert len(result) == 1 and result[0] is data[0] 
pygeofilter-0.3.1/tests/backends/oraclesql/000077500000000000000000000000001473475122500207575ustar00rootroot00000000000000pygeofilter-0.3.1/tests/backends/oraclesql/__init__.py000066400000000000000000000000001473475122500230560ustar00rootroot00000000000000pygeofilter-0.3.1/tests/backends/oraclesql/test_evaluate.py000066400000000000000000000130701473475122500241770ustar00rootroot00000000000000# ------------------------------------------------------------------------------ # # Project: pygeofilter # Authors: Andreas Kosubek # Bernhard Mallinger # ------------------------------------------------------------------------------ # Copyright (C) 2023 Agrar Markt Austria # Copyright (C) 2024 EOX IT Services GmbH # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies of this Software or works derived from this Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. 
# ------------------------------------------------------------------------------ from pygeofilter.backends.oraclesql import ( to_sql_where, to_sql_where_with_bind_variables, ) from pygeofilter.parsers.ecql import parse FIELD_MAPPING = { "str_attr": "str_attr", "int_attr": "int_attr", "float_attr": "float_attr", "point_attr": "geometry_attr", } FUNCTION_MAP = {} def test_between(): where = to_sql_where( parse("int_attr NOT BETWEEN 4 AND 6"), FIELD_MAPPING, FUNCTION_MAP ) assert where == "(int_attr NOT BETWEEN 4 AND 6)" def test_between_with_binds(): where, binds = to_sql_where_with_bind_variables( parse("int_attr NOT BETWEEN 4 AND 6"), FIELD_MAPPING, FUNCTION_MAP ) assert where == "(int_attr NOT BETWEEN :int_attr_low_0 AND :int_attr_high_0)" assert binds == {"int_attr_low_0": 4, "int_attr_high_0": 6} def test_like(): where = to_sql_where(parse("str_attr LIKE 'foo%'"), FIELD_MAPPING, FUNCTION_MAP) assert where == "str_attr LIKE 'foo%' ESCAPE '\\'" def test_like_with_binds(): where, binds = to_sql_where_with_bind_variables( parse("str_attr LIKE 'foo%'"), FIELD_MAPPING, FUNCTION_MAP ) assert where == "str_attr LIKE :str_attr_0 ESCAPE '\\'" assert binds == {"str_attr_0": "foo%"} def test_combination(): where = to_sql_where( parse("int_attr = 5 AND float_attr < 6.0"), FIELD_MAPPING, FUNCTION_MAP ) assert where == "((int_attr = 5) AND (float_attr < 6.0))" def test_combination_with_binds(): where, binds = to_sql_where_with_bind_variables( parse("int_attr = 5 AND float_attr < 6.0"), FIELD_MAPPING, FUNCTION_MAP ) assert where == "((int_attr = :int_attr_0) AND (float_attr < :float_attr_1))" assert binds == {"int_attr_0": 5, "float_attr_1": 6.0} def test_spatial(): where = to_sql_where( parse("INTERSECTS(point_attr, ENVELOPE (0 1 0 1))"), FIELD_MAPPING, FUNCTION_MAP, ) geo_json = ( '{"type": "Polygon", ' '"coordinates": [[[0, 0], [0, 1], [1, 1], [1, 0], [0, 0]]]}' ) assert where == ( "SDO_RELATE(geometry_attr, " f"SDO_UTIL.FROM_JSON(geometry => '{geo_json}', srid => 4326), " 
"'mask=ANYINTERACT') = 'TRUE'" ) def test_spatial_with_binds(): where, binds = to_sql_where_with_bind_variables( parse("INTERSECTS(point_attr, ENVELOPE (0 1 0 1))"), FIELD_MAPPING, FUNCTION_MAP, ) geo_json = ( '{"type": "Polygon", ' '"coordinates": [[[0, 0], [0, 1], [1, 1], [1, 0], [0, 0]]]}' ) assert where == ( "SDO_RELATE(geometry_attr, " "SDO_UTIL.FROM_JSON(geometry => :geo_json_0, srid => :srid_0), " "'mask=ANYINTERACT') = 'TRUE'" ) assert binds == {"geo_json_0": geo_json, "srid_0": 4326} def test_bbox(): where = to_sql_where( parse("BBOX(point_attr,-140.99778,41.6751050889,-52.6480987209,83.23324)"), FIELD_MAPPING, FUNCTION_MAP, ) geo_json = ( '{"type": "Polygon", "coordinates": [[' "[-140.99778, 41.6751050889], " "[-140.99778, 83.23324], " "[-52.6480987209, 83.23324], " "[-52.6480987209, 41.6751050889], " "[-140.99778, 41.6751050889]]]}" ) assert where == ( "SDO_RELATE(geometry_attr, " f"SDO_UTIL.FROM_JSON(geometry => '{geo_json}', srid => 4326), " "'mask=ANYINTERACT') = 'TRUE'" ) def test_bbox_with_binds(): where, binds = to_sql_where_with_bind_variables( parse("BBOX(point_attr,-140.99778,41.6751050889,-52.6480987209,83.23324)"), FIELD_MAPPING, FUNCTION_MAP, ) geo_json = ( '{"type": "Polygon", "coordinates": [[' "[-140.99778, 41.6751050889], " "[-140.99778, 83.23324], " "[-52.6480987209, 83.23324], " "[-52.6480987209, 41.6751050889], " "[-140.99778, 41.6751050889]]]}" ) assert where == ( "SDO_RELATE(geometry_attr, " "SDO_UTIL.FROM_JSON(geometry => :geo_json_0, srid => :srid_0), " "'mask=ANYINTERACT') = 'TRUE'" ) assert binds == {"geo_json_0": geo_json, "srid_0": 4326} 
pygeofilter-0.3.1/tests/backends/sqlalchemy/000077500000000000000000000000001473475122500211345ustar00rootroot00000000000000pygeofilter-0.3.1/tests/backends/sqlalchemy/__init__.py000066400000000000000000000000001473475122500232330ustar00rootroot00000000000000pygeofilter-0.3.1/tests/backends/sqlalchemy/test_evaluate.py000066400000000000000000000266021473475122500243610ustar00rootroot00000000000000import ctypes.util import dateparser import pytest from geoalchemy2 import Geometry from sqlalchemy import ( Column, DateTime, Float, ForeignKey, Integer, String, create_engine, ) from sqlalchemy.event import listen from sqlalchemy.ext.declarative import declarative_base from sqlalchemy.orm import scoped_session, sessionmaker from sqlalchemy.sql import func, select from pygeofilter.backends.sqlalchemy.evaluate import to_filter from pygeofilter.parsers.ecql import parse Base = declarative_base() mod_spatialite = ctypes.util.find_library("mod_spatialite") if not mod_spatialite: import pathlib matches = list(pathlib.Path("/usr/lib").glob("*/mod_spatialite.so")) if matches: mod_spatialite = str(matches[0]) import pytest pytestmark = pytest.mark.skipif( not mod_spatialite, reason="mod_spatialite.so not available" ) class Record(Base): __tablename__ = "record" identifier = Column(String, primary_key=True) geometry = Column( Geometry( geometry_type="MULTIPOLYGON", srid=4326, spatial_index=False, ) ) float_attribute = Column(Float) int_attribute = Column(Integer) str_attribute = Column(String) datetime_attribute = Column(DateTime) choice_attribute = Column(Integer) class RecordMeta(Base): __tablename__ = "record_meta" identifier = Column(Integer, primary_key=True) record = Column(String, ForeignKey("record.identifier")) float_meta_attribute = Column(Float) int_meta_attribute = Column(Integer) str_meta_attribute = Column(String) datetime_meta_attribute = Column(DateTime) choice_meta_attribute = Column(Integer) FIELD_MAPPING = { "identifier": Record.identifier, "geometry": 
Record.geometry, "floatAttribute": Record.float_attribute, "intAttribute": Record.int_attribute, "strAttribute": Record.str_attribute, "datetimeAttribute": Record.datetime_attribute, "choiceAttribute": Record.choice_attribute, # meta fields "floatMetaAttribute": RecordMeta.float_meta_attribute, "intMetaAttribute": RecordMeta.int_meta_attribute, "strMetaAttribute": RecordMeta.str_meta_attribute, "datetimeMetaAttribute": RecordMeta.datetime_meta_attribute, "choiceMetaAttribute": RecordMeta.choice_meta_attribute, } def load_spatialite(dbapi_conn, connection_record): dbapi_conn.enable_load_extension(True) dbapi_conn.load_extension(mod_spatialite) @pytest.fixture(scope="session") def connection(): engine = create_engine("sqlite://", echo=True) listen(engine, "connect", load_spatialite) return engine.connect() def seed_database(db_session): record = Record( identifier="A", geometry="SRID=4326;MULTIPOLYGON(((0 0, 0 5, 5 5,5 0,0 0)))", float_attribute=0.0, int_attribute=10, str_attribute="AAA", datetime_attribute=dateparser.parse("2000-01-01T00:00:00Z"), choice_attribute=1, ) db_session.add(record) record_meta = RecordMeta( float_meta_attribute=10.0, int_meta_attribute=20, str_meta_attribute="AparentA", datetime_meta_attribute=dateparser.parse("2000-01-01T00:00:05Z"), choice_meta_attribute=1, record=record.identifier, ) db_session.add(record_meta) record = Record( identifier="B", geometry="SRID=4326;MULTIPOLYGON(((5 5, 5 10, 10 10,10 5,5 5)))", float_attribute=30.0, int_attribute=None, str_attribute="BBB", datetime_attribute=dateparser.parse("2000-01-01T00:00:10Z"), choice_attribute=1, ) db_session.add(record) record_meta = RecordMeta( float_meta_attribute=20.0, int_meta_attribute=30, str_meta_attribute="BparentB", datetime_meta_attribute=dateparser.parse("2000-01-01T00:00:05Z"), choice_meta_attribute=1, record=record.identifier, ) db_session.add(record_meta) db_session.commit() @pytest.fixture(scope="session") def setup_database(connection): 
connection.execute(select(func.InitSpatialMetaData())) Base.metadata.create_all(connection) connection.commit() seed_database( scoped_session(sessionmaker(autocommit=False, autoflush=False, bind=connection)) ) yield Base.metadata.drop_all(connection) @pytest.fixture def db_session(setup_database, connection): transaction = connection.begin() yield scoped_session( sessionmaker(autocommit=False, autoflush=False, bind=connection) ) transaction.rollback() def evaluate(session, cql_expr, expected_ids, filter_option=None): ast = parse(cql_expr) filters = to_filter(ast, FIELD_MAPPING, filter_option) q = session.query(Record).join(RecordMeta).filter(filters) results = [row.identifier for row in q] assert expected_ids == type(expected_ids)(results) # common comparisons def test_id_eq(db_session): evaluate(db_session, "identifier = 'A'", ("A",)) def test_id_ne(db_session): evaluate(db_session, "identifier <> 'B'", ("A",)) def test_float_lt(db_session): evaluate(db_session, "floatAttribute < 30", ("A",)) def test_float_le(db_session): evaluate(db_session, "floatAttribute <= 20", ("A",)) def test_float_gt(db_session): evaluate(db_session, "floatAttribute > 20", ("B",)) def test_float_ge(db_session): evaluate(db_session, "floatAttribute >= 30", ("B",)) def test_float_between(db_session): evaluate(db_session, "floatAttribute BETWEEN -1 AND 1", ("A",)) # test different field types def test_common_value_eq(db_session): evaluate(db_session, "strAttribute = 'AAA'", ("A",)) def test_common_value_in(db_session): evaluate(db_session, "strAttribute IN ('AAA', 'XXX')", ("A",)) def test_common_value_like(db_session): evaluate(db_session, "strAttribute LIKE 'AA%'", ("A",)) def test_common_value_like_middle(db_session): evaluate(db_session, "strAttribute LIKE 'A%A'", ("A",)) def test_like_beginswith(db_session): evaluate(db_session, "strMetaAttribute LIKE 'A%'", ("A",)) def test_ilike_beginswith(db_session): evaluate(db_session, "strMetaAttribute ILIKE 'a%'", ("A",)) def 
test_like_endswith(db_session): evaluate(db_session, "strMetaAttribute LIKE '%A'", ("A",)) def test_ilike_endswith(db_session): evaluate(db_session, "strMetaAttribute ILIKE '%a'", ("A",)) def test_like_middle(db_session): evaluate(db_session, "strMetaAttribute LIKE '%parent%'", ("A", "B")) def test_like_startswith_middle(db_session): evaluate(db_session, "strMetaAttribute LIKE 'A%rent%'", ("A",)) def test_like_middle_endswith(db_session): evaluate(db_session, "strMetaAttribute LIKE '%ren%A'", ("A",)) def test_like_startswith_middle_endswith(db_session): evaluate(db_session, "strMetaAttribute LIKE 'A%ren%A'", ("A",)) def test_ilike_middle(db_session): evaluate(db_session, "strMetaAttribute ILIKE '%PaReNT%'", ("A", "B")) def test_not_like_beginswith(db_session): evaluate(db_session, "strMetaAttribute NOT LIKE 'B%'", ("A",)) def test_not_ilike_beginswith(db_session): evaluate(db_session, "strMetaAttribute NOT ILIKE 'b%'", ("A",)) def test_not_like_endswith(db_session): evaluate(db_session, "strMetaAttribute NOT LIKE '%B'", ("A",)) def test_not_ilike_endswith(db_session): evaluate(db_session, "strMetaAttribute NOT ILIKE '%b'", ("A",)) # (NOT) IN def test_string_in(db_session): evaluate(db_session, "identifier IN ('A', 'B')", ("A", "B")) def test_string_not_in(db_session): evaluate(db_session, "identifier NOT IN ('B', 'C')", ("A",)) # (NOT) NULL def test_string_null(db_session): evaluate(db_session, "intAttribute IS NULL", ("B",)) def test_string_not_null(db_session): evaluate(db_session, "intAttribute IS NOT NULL", ("A",)) # temporal predicates def test_before(db_session): evaluate(db_session, "datetimeAttribute BEFORE 2000-01-01T00:00:01Z", ("A",)) def test_before_or_during_dt_dt(db_session): evaluate( db_session, "datetimeAttribute BEFORE OR DURING " "2000-01-01T00:00:00Z / 2000-01-01T00:00:01Z", ("A",), ) def test_before_or_during_dt_td(db_session): evaluate( db_session, "datetimeAttribute BEFORE OR DURING " "2000-01-01T00:00:00Z / PT4S", ("A",), ) def 
test_before_or_during_td_dt(db_session): evaluate( db_session, "datetimeAttribute BEFORE OR DURING " "PT4S / 2000-01-01T00:00:03Z", ("A",), ) def test_during_td_dt(db_session): evaluate( db_session, "datetimeAttribute BEFORE OR DURING " "PT4S / 2000-01-01T00:00:03Z", ("A",), ) # spatial predicates def test_intersects_point(db_session): evaluate(db_session, "INTERSECTS(geometry, POINT(1 1.0))", ("A",)) def test_intersects_mulitipoint_1(db_session): evaluate(db_session, "INTERSECTS(geometry, MULTIPOINT(0 0, 1 1))", ("A",)) def test_intersects_mulitipoint_2(db_session): evaluate(db_session, "INTERSECTS(geometry, MULTIPOINT((0 0), (1 1)))", ("A",)) def test_intersects_linestring(db_session): evaluate(db_session, "INTERSECTS(geometry, LINESTRING(0 0, 1 1))", ("A",)) def test_intersects_multilinestring(db_session): evaluate( db_session, "INTERSECTS(geometry, MULTILINESTRING((0 0, 1 1), (2 1, 1 2)))", ("A",), ) def test_intersects_polygon(db_session): evaluate( db_session, "INTERSECTS(geometry, " "POLYGON((0 0, 3 0, 3 3, 0 3, 0 0), (1 1, 2 1, 2 2, 1 2, 1 1)))", ("A",), ) def test_intersects_multipolygon(db_session): evaluate( db_session, "INTERSECTS(geometry, " "MULTIPOLYGON(((0 0, 3 0, 3 3, 0 3, 0 0), " "(1 1, 2 1, 2 2, 1 2, 1 1))))", ("A",), ) def test_intersects_envelope(db_session): evaluate(db_session, "INTERSECTS(geometry, ENVELOPE(0 1.0 0 1.0))", ("A",)) # Commented out as not supported in spatialite for testing # def test_dwithin(db_session): # evaluate(db_session, "DWITHIN(geometry, POINT(0 0), 10, meters)", ("A",)) # def test_beyond(db_session): # evaluate(db_session, "BEYOND(geometry, POINT(0 0), 10, meters)", ("B",)) def test_bbox(db_session): evaluate(db_session, "BBOX(geometry, 0, 0, 1, 1, '4326')", ("A",)) # arithmethic expressions def test_arith_simple_plus(db_session): evaluate(db_session, "intMetaAttribute = 10 + 10", ("A",)) def test_arith_field_plus_1(db_session): evaluate(db_session, "intMetaAttribute = floatMetaAttribute + 10", ("A", "B")) def 
test_arith_field_plus_2(db_session): evaluate(db_session, "intMetaAttribute = 10 + floatMetaAttribute", ("A", "B")) def test_arith_field_plus_field(db_session): evaluate( db_session, "intMetaAttribute = " "floatMetaAttribute + intAttribute", ("A",) ) def test_arith_field_plus_mul_1(db_session): evaluate(db_session, "intMetaAttribute = intAttribute * 1.5 + 5", ("A",)) def test_arith_field_plus_mul_2(db_session): evaluate(db_session, "intMetaAttribute = 5 + intAttribute * 1.5", ("A",)) # handling undefined/invalid attributes def test_undef_comp(db_session): # treat undefined/invalid attribute as null evaluate(db_session, "missingAttribute > 10", (), True) def test_undef_isnull(db_session): evaluate(db_session, "missingAttribute IS NULL", ("A", "B"), True) def test_undef_comp_error(db_session): # error if undefined/invalid attribute with pytest.raises(KeyError): evaluate(db_session, "missingAttribute > 10", (), False) pygeofilter-0.3.1/tests/backends/sqlalchemy/test_filters.py000066400000000000000000000016071473475122500242210ustar00rootroot00000000000000from typing import cast import pytest from pygeofilter.backends.sqlalchemy import filters @pytest.mark.parametrize( "geom, expected", [ pytest.param( {"type": "Point", "coordinates": [10, 12]}, "ST_GeomFromEWKT('SRID=4326;POINT (10 12)')", id="without-crs", ), pytest.param( { "type": "Point", "coordinates": [1, 2], "crs": { "type": "name", "properties": {"name": "urn:ogc:def:crs:EPSG::3004"}, }, }, "ST_GeomFromEWKT('SRID=3004;POINT (1 2)')", id="with-crs", ), ], ) def test_parse_geometry(geom, expected): parsed = filters.parse_geometry(cast(dict, geom)) result = str(parsed.compile(compile_kwargs={"literal_binds": True})) assert result == expected 
pygeofilter-0.3.1/tests/native/000077500000000000000000000000001473475122500165065ustar00rootroot00000000000000pygeofilter-0.3.1/tests/native/__init__.py000066400000000000000000000000001473475122500206050ustar00rootroot00000000000000pygeofilter-0.3.1/tests/native/test_evaluate.py000066400000000000000000000313401473475122500217260ustar00rootroot00000000000000import math from dataclasses import dataclass from datetime import date, datetime, timezone from typing import List, Optional import pytest from shapely.geometry import Point from pygeofilter import ast from pygeofilter.backends.native.evaluate import NativeEvaluator from pygeofilter.parsers.ecql import parse @dataclass class Nested: str_attr: str @dataclass class Record: str_attr: str maybe_str_attr: Optional[str] int_attr: int float_attr: float date_attr: date datetime_attr: datetime point_attr: Point array_attr: List[int] nested_attr: Nested @pytest.fixture def data(): data = [ Record( "this is a test", None, 5, 5.5, date(2010, 1, 1), datetime(2010, 1, 1, tzinfo=timezone.utc), Point(1, 1), [2, 3], Nested("this is a test"), ), Record( "this is another test", "not null", 8, 8.5, date(2010, 1, 10), datetime(2010, 1, 10, tzinfo=timezone.utc), Point(2, 2), [1, 2, 3, 4, 5], Nested("this is another test"), ), ] data[0].extra_attr = 123 return data def filter_(ast, data): filter_expr = NativeEvaluator( math.__dict__, allow_nested_attributes=True, ).evaluate(ast) return [record for record in data if filter_expr(record)] @pytest.fixture def data_json(): data = [ { "type": "Feature", "geometry": {"type": "Point", "coordinates": (1, 1)}, "properties": { "str_attr": "this is a test", "maybe_str_attr": None, "int_attr": 5, "float_attr": 5.5, "date_attr": "2010-01-01", "datetime_attr": "2010-01-01T00:00:00Z", "array_attr": [2, 3], "extra_attr": 123, }, }, { "type": "Feature", "geometry": {"type": "Point", "coordinates": (2, 2)}, "properties": { "str_attr": "this is another test", "maybe_str_attr": "not null", "int_attr": 8, 
"float_attr": 8.5, "date_attr": "2010-01-10", "datetime_attr": "2010-01-10T00:00:00Z", "array_attr": [1, 2, 3, 4, 5], }, }, ] return data def filter_json(ast, data): attr_map = {"point_attr": "geometry", "*": "properties.*"} filter_expr = NativeEvaluator(math.__dict__, attr_map, use_getattr=False).evaluate( ast ) return [record for record in data if filter_expr(record)] def test_comparison(data): result = filter_(parse("int_attr = 5"), data) assert len(result) == 1 and result[0] is data[0] result = filter_(parse("int_attr < 6"), data) assert len(result) == 1 and result[0] is data[0] result = filter_(parse("int_attr > 6"), data) assert len(result) == 1 and result[0] is data[1] result = filter_(parse("int_attr <= 5"), data) assert len(result) == 1 and result[0] is data[0] result = filter_(parse("int_attr >= 8"), data) assert len(result) == 1 and result[0] is data[1] result = filter_(parse("int_attr <> 5"), data) assert len(result) == 1 and result[0] is data[1] def test_comparison_json(data_json): result = filter_json(parse("int_attr = 5"), data_json) assert len(result) == 1 and result[0] is data_json[0] result = filter_json(parse("int_attr < 6"), data_json) assert len(result) == 1 and result[0] is data_json[0] result = filter_json(parse("int_attr > 6"), data_json) assert len(result) == 1 and result[0] is data_json[1] result = filter_json(parse("int_attr <= 5"), data_json) assert len(result) == 1 and result[0] is data_json[0] result = filter_json(parse("int_attr >= 8"), data_json) assert len(result) == 1 and result[0] is data_json[1] result = filter_json(parse("int_attr <> 5"), data_json) assert len(result) == 1 and result[0] is data_json[1] def test_combination(data): result = filter_(parse("int_attr = 5 AND float_attr < 6.0"), data) assert len(result) == 1 and result[0] is data[0] result = filter_(parse("int_attr = 5 AND float_attr < 6.0"), data) assert len(result) == 1 and result[0] is data[0] def test_combination_json(data_json): result = 
filter_json(parse("int_attr = 5 AND float_attr < 6.0"), data_json) assert len(result) == 1 and result[0] is data_json[0] result = filter_json(parse("int_attr = 5 AND float_attr < 6.0"), data_json) assert len(result) == 1 and result[0] is data_json[0] def test_between(data): result = filter_(parse("float_attr BETWEEN 4 AND 6"), data) assert len(result) == 1 and result[0] is data[0] result = filter_(parse("int_attr NOT BETWEEN 4 AND 6"), data) assert len(result) == 1 and result[0] is data[1] def test_between_json(data_json): result = filter_json(parse("float_attr BETWEEN 4 AND 6"), data_json) assert len(result) == 1 and result[0] is data_json[0] result = filter_json(parse("int_attr NOT BETWEEN 4 AND 6"), data_json) assert len(result) == 1 and result[0] is data_json[1] def test_like(data): result = filter_(parse("str_attr LIKE 'this is . test'"), data) assert len(result) == 1 and result[0] is data[0] result = filter_(parse("str_attr LIKE 'this is % test'"), data) assert len(result) == 2 result = filter_(parse("str_attr NOT LIKE '% another test'"), data) assert len(result) == 1 and result[0] is data[0] result = filter_(parse("str_attr NOT LIKE 'this is . test'"), data) assert len(result) == 1 and result[0] is data[1] result = filter_(parse("str_attr ILIKE 'THIS IS . TEST'"), data) assert len(result) == 1 and result[0] is data[0] result = filter_(parse("str_attr ILIKE 'THIS IS % TEST'"), data) assert len(result) == 2 def test_like_json(data_json): result = filter_json(parse("str_attr LIKE 'this is . test'"), data_json) assert len(result) == 1 and result[0] is data_json[0] result = filter_json(parse("str_attr LIKE 'this is % test'"), data_json) assert len(result) == 2 result = filter_json(parse("str_attr NOT LIKE '% another test'"), data_json) assert len(result) == 1 and result[0] is data_json[0] result = filter_json(parse("str_attr NOT LIKE 'this is . 
test'"), data_json) assert len(result) == 1 and result[0] is data_json[1] result = filter_json(parse("str_attr ILIKE 'THIS IS . TEST'"), data_json) assert len(result) == 1 and result[0] is data_json[0] result = filter_json(parse("str_attr ILIKE 'THIS IS % TEST'"), data_json) assert len(result) == 2 def test_in(data): result = filter_(parse("int_attr IN ( 1, 2, 3, 4, 5 )"), data) assert len(result) == 1 and result[0] is data[0] result = filter_(parse("int_attr NOT IN ( 1, 2, 3, 4, 5 )"), data) assert len(result) == 1 and result[0] is data[1] def test_in_json(data_json): result = filter_json(parse("int_attr IN ( 1, 2, 3, 4, 5 )"), data_json) assert len(result) == 1 and result[0] is data_json[0] result = filter_json(parse("int_attr NOT IN ( 1, 2, 3, 4, 5 )"), data_json) assert len(result) == 1 and result[0] is data_json[1] def test_null(data): result = filter_(parse("maybe_str_attr IS NULL"), data) assert len(result) == 1 and result[0] is data[0] result = filter_(parse("maybe_str_attr IS NOT NULL"), data) assert len(result) == 1 and result[0] is data[1] def test_null_json(data_json): result = filter_json(parse("maybe_str_attr IS NULL"), data_json) assert len(result) == 1 and result[0] is data_json[0] result = filter_json(parse("maybe_str_attr IS NOT NULL"), data_json) assert len(result) == 1 and result[0] is data_json[1] def test_has_attr(data): result = filter_(parse("extra_attr EXISTS"), data) assert len(result) == 1 and result[0] is data[0] result = filter_(parse("extra_attr DOES-NOT-EXIST"), data) assert len(result) == 1 and result[0] is data[1] def test_has_attr_json(data_json): result = filter_json(parse("extra_attr EXISTS"), data_json) assert len(result) == 1 and result[0] is data_json[0] result = filter_json(parse("extra_attr DOES-NOT-EXIST"), data_json) assert len(result) == 1 and result[0] is data_json[1] def test_temporal(data): result = filter_(parse("date_attr BEFORE 2010-01-08T00:00:00.00Z"), data) assert len(result) == 1 and result[0] is data[0] result 
= filter_(parse("date_attr AFTER 2010-01-08T00:00:00.00+01:00"), data) assert len(result) == 1 and result[0] is data[1] def test_temporal_json(data_json): result = filter_json(parse("date_attr BEFORE 2010-01-08T00:00:00.00Z"), data_json) assert len(result) == 1 and result[0] is data_json[0] result = filter_json( parse("date_attr AFTER 2010-01-08T00:00:00.00+01:00"), data_json ) assert len(result) == 1 and result[0] is data_json[1] def test_array(data): result = filter_( ast.ArrayEquals( ast.Attribute("array_attr"), [2, 3], ), data, ) assert len(result) == 1 and result[0] is data[0] result = filter_( ast.ArrayContains( ast.Attribute("array_attr"), [1, 2, 3, 4], ), data, ) assert len(result) == 1 and result[0] is data[1] result = filter_( ast.ArrayContainedBy( ast.Attribute("array_attr"), [1, 2, 3, 4], ), data, ) assert len(result) == 1 and result[0] is data[0] result = filter_( ast.ArrayOverlaps( ast.Attribute("array_attr"), [5, 6, 7], ), data, ) assert len(result) == 1 and result[0] is data[1] def test_array_json(data_json): result = filter_json( ast.ArrayEquals( ast.Attribute("array_attr"), [2, 3], ), data_json, ) assert len(result) == 1 and result[0] is data_json[0] result = filter_json( ast.ArrayContains( ast.Attribute("array_attr"), [1, 2, 3, 4], ), data_json, ) assert len(result) == 1 and result[0] is data_json[1] result = filter_json( ast.ArrayContainedBy( ast.Attribute("array_attr"), [1, 2, 3, 4], ), data_json, ) assert len(result) == 1 and result[0] is data_json[0] result = filter_json( ast.ArrayOverlaps( ast.Attribute("array_attr"), [5, 6, 7], ), data_json, ) assert len(result) == 1 and result[0] is data_json[1] def test_spatial(data): result = filter_( parse("INTERSECTS(point_attr, ENVELOPE (0 1 0 1))"), data, ) assert len(result) == 1 and result[0] is data[0] result = filter_( parse("EQUALS(point_attr, POINT(2 2))"), data, ) assert len(result) == 1 and result[0] is data[1] result = filter_( parse("BBOX(point_attr, 0.5, 0.5, 1.5, 1.5)"), data, ) assert 
len(result) == 1 and result[0] is data[0] def test_spatial_json(data_json): result = filter_json( parse("INTERSECTS(point_attr, ENVELOPE (0 1 0 1))"), data_json, ) assert len(result) == 1 and result[0] is data_json[0] result = filter_json( parse("EQUALS(point_attr, POINT(2 2))"), data_json, ) assert len(result) == 1 and result[0] is data_json[1] result = filter_json( parse("BBOX(point_attr, 0.5, 0.5, 1.5, 1.5)"), data_json, ) assert len(result) == 1 and result[0] is data_json[0] def test_arithmetic(data): result = filter_( parse("int_attr = float_attr - 0.5"), data, ) assert len(result) == 2 result = filter_( parse("int_attr = 5 + 20 / 2 - 10"), data, ) assert len(result) == 1 and result[0] is data[0] def test_arithmetic_json(data_json): result = filter_json( parse("int_attr = float_attr - 0.5"), data_json, ) assert len(result) == 2 result = filter_json( parse("int_attr = 5 + 20 / 2 - 10"), data_json, ) assert len(result) == 1 and result[0] is data_json[0] def test_function(data): result = filter_( parse("sin(float_attr) BETWEEN -0.75 AND -0.70"), data, ) assert len(result) == 1 and result[0] is data[0] def test_function_json(data_json): result = filter_json( parse("sin(float_attr) BETWEEN -0.75 AND -0.70"), data_json, ) assert len(result) == 1 and result[0] is data_json[0] def test_nested(data): result = filter_( parse("\"nested_attr.str_attr\" = 'this is a test'"), data, ) assert len(result) == 1 and result[0] is data[0] 
pygeofilter-0.3.1/tests/parsers/000077500000000000000000000000001473475122500166775ustar00rootroot00000000000000pygeofilter-0.3.1/tests/parsers/__init__.py000066400000000000000000000000001473475122500207760ustar00rootroot00000000000000pygeofilter-0.3.1/tests/parsers/cql2_json/000077500000000000000000000000001473475122500205715ustar00rootroot00000000000000pygeofilter-0.3.1/tests/parsers/cql2_json/__init__.py000066400000000000000000000000001473475122500226700ustar00rootroot00000000000000pygeofilter-0.3.1/tests/parsers/cql2_json/fixtures.json000066400000000000000000000122441473475122500233400ustar00rootroot00000000000000{ "Example 1": { "text": "filter=id='LC08_L1TP_060247_20180905_20180912_01_T1_L1TP' AND collection='landsat8_l1tp'", "json": "{\"filter\": {\"op\": \"and\", \"args\": [{\"op\": \"=\", \"args\": [{\"property\": \"id\"}, \"LC08_L1TP_060247_20180905_20180912_01_T1_L1TP\"]}, {\"op\": \"=\", \"args\": [{\"property\": \"collection\"}, \"landsat8_l1tp\"]}]}}" }, "Example 2": { "text": "filter=collection = 'landsat8_l1tp' AND eo:cloud_cover <= 10 AND datetime >= TIMESTAMP('2021-04-08T04:39:23Z') AND S_INTERSECTS(geometry, POLYGON((43.5845 -79.5442, 43.6079 -79.4893, 43.5677 -79.4632, 43.6129 -79.3925, 43.6223 -79.3238, 43.6576 -79.3163, 43.7945 -79.1178, 43.8144 -79.1542, 43.8555 -79.1714, 43.7509 -79.6390, 43.5845 -79.5442)))", "json": "{\"filter-lang\": \"cql2-json\", \"filter\": {\"op\": \"and\", \"args\": [{\"op\": \"=\", \"args\": [{\"property\": \"collection\"}, \"landsat8_l1tp\"]}, {\"op\": \"<=\", \"args\": [{\"property\": \"eo:cloud_cover\"}, 10]}, {\"op\": \">=\", \"args\": [{\"property\": \"datetime\"}, {\"timestamp\": \"2021-04-08T04:39:23Z\"}]}, {\"op\": \"s_intersects\", \"args\": [{\"property\": \"geometry\"}, {\"type\": \"Polygon\", \"coordinates\": [[[43.5845, -79.5442], [43.6079, -79.4893], [43.5677, -79.4632], [43.6129, -79.3925], [43.6223, -79.3238], [43.6576, -79.3163], [43.7945, -79.1178], [43.8144, -79.1542], [43.8555, -79.1714], 
[43.7509, -79.639], [43.5845, -79.5442]]]}]}]}}" }, "Example 3": { "text": "filter=sentinel:data_coverage > 50 AND eo:cloud_cover < 10 ", "json": "{\"filter-lang\": \"cql2-json\", \"filter\": {\"op\": \"and\", \"args\": [{\"op\": \">\", \"args\": [{\"property\": \"sentinel:data_coverage\"}, 50]}, {\"op\": \"<\", \"args\": [{\"property\": \"eo:cloud_cover\"}, 10]}]}}" }, "Example 4": { "text": "filter=sentinel:data_coverage > 50 OR eo:cloud_cover < 10 ", "json": "{\"filter-lang\": \"cql2-json\", \"filter\": {\"op\": \"or\", \"args\": [{\"op\": \">\", \"args\": [{\"property\": \"sentinel:data_coverage\"}, 50]}, {\"op\": \"<\", \"args\": [{\"property\": \"eo:cloud_cover\"}, 10]}]}}" }, "Example 5": { "text": "filter=prop1 = prop2", "json": "{\"filter-lang\": \"cql2-json\", \"filter\": {\"op\": \"=\", \"args\": [{\"property\": \"prop1\"}, {\"property\": \"prop2\"}]}}" }, "Example 6": { "text": "filter=datetime T_INTERSECTS INTERVAL('2020-11-11T00:00:00Z', '2020-11-12T00:00:00Z')", "json": "{\"filter-lang\": \"cql2-json\", \"filter\": {\"op\": \"t_intersects\", \"args\": [{\"property\": \"datetime\"}, {\"interval\": [\"2020-11-11T00:00:00Z\", \"2020-11-12T00:00:00Z\"]}]}}" }, "Example 7": { "text": "filter=S_INTERSECTS(geometry,POLYGON((-77.0824 38.7886,-77.0189 38.7886,-77.0189 38.8351,-77.0824 38.8351,-77.0824 38.7886)))", "json": "{\"filter-lang\": \"cql2-json\", \"filter\": {\"op\": \"s_intersects\", \"args\": [{\"property\": \"geometry\"}, {\"type\": \"Polygon\", \"coordinates\": [[[-77.0824, 38.7886], [-77.0189, 38.7886], [-77.0189, 38.8351], [-77.0824, 38.8351], [-77.0824, 38.7886]]]}]}}" }, "Example 8": { "text": "filter=S_INTERSECTS(geometry,POLYGON((-77.0824 38.7886,-77.0189 38.7886,-77.0189 38.8351,-77.0824 38.8351,-77.0824 38.7886))) OR S_INTERSECTS(geometry,POLYGON((-79.0935 38.7886,-79.0290 38.7886,-79.0290 38.8351,-79.0935 38.8351,-79.0935 38.7886)))", "json": "{\"filter-lang\": \"cql2-json\", \"filter\": {\"op\": \"or\", \"args\": [{\"op\": 
\"s_intersects\", \"args\": [{\"property\": \"geometry\"}, {\"type\": \"Polygon\", \"coordinates\": [[[-77.0824, 38.7886], [-77.0189, 38.7886], [-77.0189, 38.8351], [-77.0824, 38.8351], [-77.0824, 38.7886]]]}]}, {\"op\": \"s_intersects\", \"args\": [{\"property\": \"geometry\"}, {\"type\": \"Polygon\", \"coordinates\": [[[-79.0935, 38.7886], [-79.029, 38.7886], [-79.029, 38.8351], [-79.0935, 38.8351], [-79.0935, 38.7886]]]}]}]}}" }, "Example 9": { "text": "filter=sentinel:data_coverage > 50 OR landsat:coverage_percent < 10 OR (sentinel:data_coverage IS NULL AND landsat:coverage_percent IS NULL)", "json": "{\"filter-lang\": \"cql2-json\", \"filter\": {\"op\": \"or\", \"args\": [{\"op\": \">\", \"args\": [{\"property\": \"sentinel:data_coverage\"}, 50]}, {\"op\": \"<\", \"args\": [{\"property\": \"landsat:coverage_percent\"}, 10]}, {\"op\": \"and\", \"args\": [{\"op\": \"isNull\", \"args\": [{\"property\": \"sentinel:data_coverage\"}]}, {\"op\": \"isNull\", \"args\": [{\"property\": \"landsat:coverage_percent\"}]}]}]}}" }, "Example 10": { "text": "filter=eo:cloud_cover BETWEEN 0 AND 50", "json": "{\"filter-lang\": \"cql2-json\", \"filter\": {\"op\": \"between\", \"args\": [{\"property\": \"eo:cloud_cover\"}, [0, 50]]}}" }, "Example 11": { "text": "filter=mission LIKE 'sentinel%'", "json": "{\"filter-lang\": \"cql2-json\", \"filter\": {\"op\": \"like\", \"args\": [{\"property\": \"mission\"}, \"sentinel%\"]}}" }, "Example 12": { "text": "filter=CASEI(provider) = 'coolsat'", "json": "{\"filter-lang\": \"cql2-json\", \"filter\": {\"op\": \"=\", \"args\": [{\"lower\": {\"property\": \"provider\"}}, \"coolsat\"]}}" } } pygeofilter-0.3.1/tests/parsers/cql2_json/get_fixtures.py000066400000000000000000000011521473475122500236520ustar00rootroot00000000000000"""Get fixtures from the spec.""" import json import re import requests url = ( "https://raw.githubusercontent.com/radiantearth/" "stac-api-spec/dev/fragments/filter/README.md" ) fixtures = {} examples_text = 
requests.get(url).text examples_raw = re.findall( r"### (Example \d+).*?```http" r"(.*?)" r"```.*?```json" r"(.*?)" r"```", examples_text, re.S, ) for example in examples_raw: fixtures[example[0]] = { "text": example[1].replace("\n", ""), "json": json.dumps(json.loads(example[2])), } with open("fixtures.json", "w") as f: json.dump(fixtures, f, indent=4) pygeofilter-0.3.1/tests/parsers/cql2_json/test_cql2_spec_fixtures.py000066400000000000000000000016001473475122500260030ustar00rootroot00000000000000import json import pathlib from pygeofilter.backends.cql2_json import to_cql2 from pygeofilter.parsers.cql2_json import parse as json_parse from pygeofilter.parsers.cql2_text import parse as text_parse dir = pathlib.Path(__file__).parent.resolve() fixtures = pathlib.Path(dir, "fixtures.json") def test_fixtures(): """Test against fixtures from spec documentation. Parses both cql2_text and cql2_json from spec documentation and makes sure AST is the same and that json when each are converted back to cql2_json is the same. 
""" with open(fixtures) as f: examples = json.load(f) for _, v in examples.items(): t = v["text"].replace("filter=", "") j = v["json"] parsed_text = text_parse(t) parsed_json = json_parse(j) assert parsed_text == parsed_json assert to_cql2(parsed_text) == to_cql2(parsed_json) pygeofilter-0.3.1/tests/parsers/cql2_json/test_parser.py000066400000000000000000000413471473475122500235070ustar00rootroot00000000000000# ------------------------------------------------------------------------------ # # Project: pygeofilter # Authors: Fabian Schindler # # ------------------------------------------------------------------------------ # Copyright (C) 2019 EOX IT Services GmbH # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies of this Software or works derived from this Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. 
# ------------------------------------------------------------------------------ import json from datetime import datetime, timedelta from dateparser.timezone_parser import StaticTzInfo from pygeoif import geometry from pygeofilter import ast, values from pygeofilter.parsers.cql2_json import parse def normalize_geom(geometry): if hasattr(geometry, "__geo_interface__"): geometry = geometry.__geo_interface__ return json.loads(json.dumps(geometry)) def test_attribute_eq_literal(): result = parse('{ "op": "eq", "args":[{ "property": "attr" }, "A"]}') assert result == ast.Equal( ast.Attribute("attr"), "A", ) def test_attribute_lt_literal(): result = parse('{"op": "lt", "args": [{ "property": "attr" }, 5]}') assert result == ast.LessThan( ast.Attribute("attr"), 5.0, ) def test_attribute_lte_literal(): result = parse('{ "op": "lte", "args": [{ "property": "attr" }, 5]}') assert result == ast.LessEqual( ast.Attribute("attr"), 5.0, ) def test_attribute_gt_literal(): result = parse('{ "op": "gt", "args": [{ "property": "attr" }, 5]}') assert result == ast.GreaterThan( ast.Attribute("attr"), 5.0, ) def test_attribute_gte_literal(): result = parse('{"op": "gte", "args":[{ "property": "attr" }, 5]}') assert result == ast.GreaterEqual( ast.Attribute("attr"), 5.0, ) def test_attribute_between(): result = parse({"op": "between", "args": [{"property": "attr"}, [2, 5]]}) assert result == ast.Between( ast.Attribute("attr"), 2, 5, False, ) def test_attribute_between_negative_positive(): result = parse({"op": "between", "args": [{"property": "attr"}, [-1, 1]]}) assert result == ast.Between( ast.Attribute("attr"), -1, 1, False, ) def test_string_like(): result = parse( { "op": "like", "args": [ {"property": "attr"}, "some%", ], } ) assert result == ast.Like( ast.Attribute("attr"), "some%", nocase=False, not_=False, wildcard="%", singlechar=".", escapechar="\\", ) def test_attribute_in_list(): result = parse( { "op": "in", "args": [ {"property": "attr"}, [1, 2, 3, 4], ], } ) assert 
result == ast.In( ast.Attribute("attr"), [ 1, 2, 3, 4, ], False, ) def test_attribute_is_null(): result = parse({"op": "isNull", "args": [{"property": "attr"}]}) assert result == ast.IsNull(ast.Attribute("attr"), False) def test_attribute_before(): result = parse( { "op": "t_before", "args": [ {"property": "attr"}, {"timestamp": "2000-01-01T00:00:01Z"}, ], } ) assert result == ast.TimeBefore( ast.Attribute("attr"), datetime(2000, 1, 1, 0, 0, 1, tzinfo=StaticTzInfo("Z", timedelta(0))), ) def test_attribute_after_dt_dt(): result = parse( { "op": "t_after", "args": [ {"property": "attr"}, {"interval": ["2000-01-01T00:00:00Z", "2000-01-01T00:00:01Z"]}, ], } ) assert result == ast.TimeAfter( ast.Attribute("attr"), values.Interval( datetime(2000, 1, 1, 0, 0, 0, tzinfo=StaticTzInfo("Z", timedelta(0))), datetime(2000, 1, 1, 0, 0, 1, tzinfo=StaticTzInfo("Z", timedelta(0))), ), ) def test_meets_dt_dr(): result = parse( { "op": "t_meets", "args": [ {"property": "attr"}, {"interval": ["2000-01-01T00:00:00Z", "PT4S"]}, ], } ) assert result == ast.TimeMeets( ast.Attribute("attr"), values.Interval( datetime(2000, 1, 1, 0, 0, 0, tzinfo=StaticTzInfo("Z", timedelta(0))), timedelta(seconds=4), ), ) def test_attribute_metby_dr_dt(): result = parse( { "op": "t_metby", "args": [ {"property": "attr"}, {"interval": ["PT4S", "2000-01-01T00:00:03Z"]}, ], } ) assert result == ast.TimeMetBy( ast.Attribute("attr"), values.Interval( timedelta(seconds=4), datetime(2000, 1, 1, 0, 0, 3, tzinfo=StaticTzInfo("Z", timedelta(0))), ), ) def test_attribute_toverlaps_open_dt(): result = parse( { "op": "t_overlaps", "args": [ {"property": "attr"}, {"interval": ["..", "2000-01-01T00:00:03Z"]}, ], } ) assert result == ast.TimeOverlaps( ast.Attribute("attr"), values.Interval( None, datetime(2000, 1, 1, 0, 0, 3, tzinfo=StaticTzInfo("Z", timedelta(0))), ), ) def test_attribute_overlappedby_dt_open(): result = parse( { "op": "t_overlappedby", "args": [ {"property": "attr"}, {"interval": ["2000-01-01T00:00:03Z", 
".."]}, ], } ) assert result == ast.TimeOverlappedBy( ast.Attribute("attr"), values.Interval( datetime(2000, 1, 1, 0, 0, 3, tzinfo=StaticTzInfo("Z", timedelta(0))), None, ), ) # Array predicate def test_attribute_aequals(): result = parse({"op": "a_equals", "args": [{"property": "arrayattr"}, [1, 2, 3]]}) assert result == ast.ArrayEquals( ast.Attribute("arrayattr"), [1, 2, 3], ) def test_attribute_aoverlaps(): result = parse({"op": "a_overlaps", "args": [{"property": "arrayattr"}, [1, 2, 3]]}) assert result == ast.ArrayOverlaps( ast.Attribute("arrayattr"), [1, 2, 3], ) def test_attribute_acontains(): result = parse({"op": "a_contains", "args": [{"property": "arrayattr"}, [1, 2, 3]]}) assert result == ast.ArrayContains( ast.Attribute("arrayattr"), [1, 2, 3], ) def test_attribute_acontainedby(): result = parse( {"op": "a_containedby", "args": [{"property": "arrayattr"}, [1, 2, 3]]} ) assert result == ast.ArrayContainedBy( ast.Attribute("arrayattr"), [1, 2, 3], ) # Spatial predicate def test_intersects_attr_point(): result = parse( { "op": "s_intersects", "args": [ {"property": "geometry"}, { "type": "Point", "coordinates": [1, 1], }, ], } ) assert result == ast.GeometryIntersects( ast.Attribute("geometry"), values.Geometry(normalize_geom(geometry.Point(1, 1).__geo_interface__)), ) def test_disjoint_linestring_attr(): result = parse( { "op": "s_disjoint", "args": [ { "type": "LineString", "coordinates": [[1, 1], [2, 2]], "bbox": [1.0, 1.0, 2.0, 2.0], }, {"property": "geometry"}, ], } ) assert result == ast.GeometryDisjoint( values.Geometry( normalize_geom(geometry.LineString([(1, 1), (2, 2)]).__geo_interface__), ), ast.Attribute("geometry"), ) def test_contains_attr_polygon(): result = parse( { "op": "s_contains", "args": [ {"property": "geometry"}, { "type": "Polygon", "coordinates": [[[1, 1], [2, 2], [0, 3], [1, 1]]], "bbox": [0.0, 1.0, 2.0, 3.0], }, ], } ) assert result == ast.GeometryContains( ast.Attribute("geometry"), values.Geometry( normalize_geom( 
geometry.Polygon([(1, 1), (2, 2), (0, 3), (1, 1)]).__geo_interface__ ), ), ) def test_within_multipolygon_attr(): result = parse( { "op": "s_within", "args": [ { "type": "MultiPolygon", "coordinates": [[[[1, 1], [2, 2], [0, 3], [1, 1]]]], "bbox": [0.0, 1.0, 2.0, 3.0], }, {"property": "geometry"}, ], } ) assert result == ast.GeometryWithin( values.Geometry( normalize_geom( geometry.MultiPolygon.from_polygons( geometry.Polygon([(1, 1), (2, 2), (0, 3), (1, 1)]) ).__geo_interface__ ), ), ast.Attribute("geometry"), ) def test_touches_attr_multilinestring(): result = parse( { "op": "s_touches", "args": [ {"property": "geometry"}, { "type": "MultiLineString", "coordinates": [[[1, 1], [2, 2]], [[0, 3], [1, 1]]], "bbox": [0.0, 1.0, 2.0, 3.0], }, ], } ) assert result == ast.GeometryTouches( ast.Attribute("geometry"), values.Geometry( normalize_geom( geometry.MultiLineString.from_linestrings( geometry.LineString([(1, 1), (2, 2)]), geometry.LineString([(0, 3), (1, 1)]), ).__geo_interface__ ), ), ) def test_crosses_attr_multilinestring(): result = parse( { "op": "s_crosses", "args": [ {"property": "geometry"}, { "type": "MultiLineString", "coordinates": [[[1, 1], [2, 2]], [[0, 3], [1, 1]]], "bbox": [0.0, 1.0, 2.0, 3.0], }, ], } ) assert result == ast.GeometryCrosses( ast.Attribute("geometry"), values.Geometry( normalize_geom( geometry.MultiLineString.from_linestrings( geometry.LineString([(1, 1), (2, 2)]), geometry.LineString([(0, 3), (1, 1)]), ).__geo_interface__ ) ), ) def test_overlaps_attr_multilinestring(): result = parse( { "op": "s_overlaps", "args": [ {"property": "geometry"}, { "type": "MultiLineString", "coordinates": [[[1, 1], [2, 2]], [[0, 3], [1, 1]]], "bbox": [0.0, 1.0, 2.0, 3.0], }, ], } ) assert result == ast.GeometryOverlaps( ast.Attribute("geometry"), values.Geometry( normalize_geom( geometry.MultiLineString.from_linestrings( geometry.LineString([(1, 1), (2, 2)]), geometry.LineString([(0, 3), (1, 1)]), ).__geo_interface__ ), ), ) def 
test_attribute_arithmetic_add(): result = parse( { "op": "eq", "args": [{"property": "attr"}, {"op": "+", "args": [5, 2]}], } ) assert result == ast.Equal( ast.Attribute("attr"), ast.Add( 5, 2, ), ) def test_attribute_arithmetic_sub(): result = parse( { "op": "eq", "args": [{"property": "attr"}, {"op": "-", "args": [5, 2]}], } ) assert result == ast.Equal( ast.Attribute("attr"), ast.Sub( 5, 2, ), ) def test_attribute_arithmetic_mul(): result = parse( { "op": "eq", "args": [{"property": "attr"}, {"op": "*", "args": [5, 2]}], } ) assert result == ast.Equal( ast.Attribute("attr"), ast.Mul( 5, 2, ), ) def test_attribute_arithmetic_div(): result = parse( { "op": "eq", "args": [{"property": "attr"}, {"op": "/", "args": [5, 2]}], } ) assert result == ast.Equal( ast.Attribute("attr"), ast.Div( 5, 2, ), ) def test_attribute_arithmetic_add_mul(): result = parse( { "op": "eq", "args": [ {"property": "attr"}, { "op": "+", "args": [ 3, {"op": "*", "args": [5, 2]}, ], }, ], } ) assert result == ast.Equal( ast.Attribute("attr"), ast.Add( 3, ast.Mul( 5, 2, ), ), ) def test_attribute_arithmetic_div_sub(): result = parse( { "op": "eq", "args": [ {"property": "attr"}, { "op": "-", "args": [ {"op": "/", "args": [3, 5]}, 2, ], }, ], } ) assert result == ast.Equal( ast.Attribute("attr"), ast.Sub( ast.Div( 3, 5, ), 2, ), ) def test_attribute_arithmetic_div_sub_bracketted(): result = parse( { "op": "eq", "args": [ {"property": "attr"}, { "op": "/", "args": [ 3, {"op": "-", "args": [5, 2]}, ], }, ], } ) assert result == ast.Equal( ast.Attribute("attr"), ast.Div( 3, ast.Sub( 5, 2, ), ), ) # test function expression parsing def test_function_no_arg(): result = parse( { "op": "eq", "args": [ {"property": "attr"}, {"function": {"name": "myfunc", "arguments": []}}, ], } ) assert result == ast.Equal( ast.Attribute("attr"), ast.Function("myfunc", []), ) def test_function_single_arg(): result = parse( { "op": "eq", "args": [ {"property": "attr"}, {"function": {"name": "myfunc", "arguments": [1]}}, 
], } ) assert result == ast.Equal( ast.Attribute("attr"), ast.Function( "myfunc", [1], ), ) def test_function_attr_string_arg(): result = parse( { "op": "eq", "args": [ {"property": "attr"}, { "function": { "name": "myfunc", "arguments": [{"property": "other_attr"}, "abc"], } }, ], } ) assert result == ast.Equal( ast.Attribute("attr"), ast.Function( "myfunc", [ ast.Attribute("other_attr"), "abc", ], ), ) pygeofilter-0.3.1/tests/parsers/cql2_text/000077500000000000000000000000001473475122500206045ustar00rootroot00000000000000pygeofilter-0.3.1/tests/parsers/cql2_text/test_parser.py000066400000000000000000000013161473475122500235120ustar00rootroot00000000000000from pygeofilter import ast from pygeofilter.parsers.cql2_text import parse def test_attribute_eq_true_uppercase(): result = parse("attr = TRUE") assert result == ast.Equal( ast.Attribute("attr"), True, ) def test_attribute_eq_true_lowercase(): result = parse("attr = true") assert result == ast.Equal( ast.Attribute("attr"), True, ) def test_attribute_eq_false_uppercase(): result = parse("attr = FALSE") assert result == ast.Equal( ast.Attribute("attr"), False, ) def test_attribute_eq_false_lowercase(): result = parse("attr = false") assert result == ast.Equal( ast.Attribute("attr"), False, ) pygeofilter-0.3.1/tests/parsers/cql_json/000077500000000000000000000000001473475122500205075ustar00rootroot00000000000000pygeofilter-0.3.1/tests/parsers/cql_json/__init__.py000066400000000000000000000000001473475122500226060ustar00rootroot00000000000000pygeofilter-0.3.1/tests/parsers/cql_json/test_parser.py000066400000000000000000000477051473475122500234310ustar00rootroot00000000000000# ------------------------------------------------------------------------------ # # Project: pygeofilter # Authors: Fabian Schindler # # ------------------------------------------------------------------------------ # Copyright (C) 2019 EOX IT Services GmbH # # Permission is hereby granted, free of charge, to any person obtaining a copy # of 
this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies of this Software or works derived from this Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. # ------------------------------------------------------------------------------ import json from datetime import datetime, timedelta from dateparser.timezone_parser import StaticTzInfo from pygeoif import geometry from pygeofilter import ast, values from pygeofilter.parsers.cql_json import parse def normalize_geom(geometry): if hasattr(geometry, "__geo_interface__"): geometry = geometry.__geo_interface__ return json.loads(json.dumps(geometry)) def test_attribute_eq_literal(): result = parse('{ "eq": [{ "property": "attr" }, "A"]}') assert result == ast.Equal( ast.Attribute("attr"), "A", ) def test_attribute_lt_literal(): result = parse('{ "lt": [{ "property": "attr" }, 5]}') assert result == ast.LessThan( ast.Attribute("attr"), 5.0, ) def test_attribute_lte_literal(): result = parse('{ "lte": [{ "property": "attr" }, 5]}') assert result == ast.LessEqual( ast.Attribute("attr"), 5.0, ) def test_attribute_gt_literal(): result = parse('{ "gt": [{ "property": "attr" }, 5]}') assert result == 
ast.GreaterThan( ast.Attribute("attr"), 5.0, ) def test_attribute_gte_literal(): result = parse('{ "gte": [{ "property": "attr" }, 5]}') assert result == ast.GreaterEqual( ast.Attribute("attr"), 5.0, ) # def test_attribute_ne_literal(): # result = parse('attr <> 5') # assert result == ast.ComparisonPredicateNode( # ast.Attribute('attr'), # 5, # ast.ComparisonOp('<>'), # ) def test_attribute_between(): result = parse( { "between": { "value": {"property": "attr"}, "lower": 2, "upper": 5, } } ) assert result == ast.Between( ast.Attribute("attr"), 2, 5, False, ) # def test_attribute_not_between(): # result = parse('attr NOT BETWEEN 2 AND 5') # assert result == ast.BetweenPredicateNode( # ast.Attribute('attr'), # 2, # 5, # True, # ) def test_attribute_between_negative_positive(): result = parse( { "between": { "value": {"property": "attr"}, "lower": -1, "upper": 1, } } ) assert result == ast.Between( ast.Attribute("attr"), -1, 1, False, ) def test_string_like(): result = parse( { "like": { "like": [ {"property": "attr"}, "some%", ], "nocase": False, } } ) assert result == ast.Like( ast.Attribute("attr"), "some%", nocase=False, not_=False, wildcard="%", singlechar=".", escapechar="\\", ) def test_string_ilike(): result = parse( { "like": { "like": [ {"property": "attr"}, "some%", ], "nocase": True, } } ) assert result == ast.Like( ast.Attribute("attr"), "some%", nocase=True, not_=False, wildcard="%", singlechar=".", escapechar="\\", ) # def test_string_not_like(): # result = parse('attr NOT LIKE "some%"') # assert result == ast.LikePredicateNode( # ast.Attribute('attr'), # 'some%', # nocase=False, # not_=True, # wildcard='%', # singlechar='.', # escapechar=None, # ) # def test_string_not_ilike(): # result = parse('attr NOT ILIKE "some%"') # assert result == ast.LikePredicateNode( # ast.Attribute('attr'), # 'some%', # nocase=True, # not_=True, # wildcard='%', # singlechar='.', # escapechar=None, # ) def test_attribute_in_list(): result = parse( { "in": { "value": 
{"property": "attr"}, "list": [1, 2, 3, 4], } } ) assert result == ast.In( ast.Attribute("attr"), [ 1, 2, 3, 4, ], False, ) # def test_attribute_not_in_list(): # result = parse('attr NOT IN ("A", "B", \'C\', \'D\')') # assert result == ast.InPredicateNode( # ast.Attribute('attr'), [ # "A", # "B", # "C", # "D", # ], # True # ) def test_attribute_is_null(): result = parse({"isNull": {"property": "attr"}}) assert result == ast.IsNull(ast.Attribute("attr"), False) # def test_attribute_is_not_null(): # result = parse('attr IS NOT NULL') # assert result == ast.NullPredicateNode( # ast.Attribute('attr'), True # ) # # Temporal predicate def test_attribute_before(): result = parse( { "before": [ {"property": "attr"}, "2000-01-01T00:00:01Z", ] } ) assert result == ast.TimeBefore( ast.Attribute("attr"), datetime(2000, 1, 1, 0, 0, 1, tzinfo=StaticTzInfo("Z", timedelta(0))), ) def test_attribute_after_dt_dt(): result = parse( { "after": [ {"property": "attr"}, ["2000-01-01T00:00:00Z", "2000-01-01T00:00:01Z"], ] } ) assert result == ast.TimeAfter( ast.Attribute("attr"), values.Interval( datetime(2000, 1, 1, 0, 0, 0, tzinfo=StaticTzInfo("Z", timedelta(0))), datetime(2000, 1, 1, 0, 0, 1, tzinfo=StaticTzInfo("Z", timedelta(0))), ), ) def test_meets_dt_dr(): result = parse({"meets": [{"property": "attr"}, ["2000-01-01T00:00:00Z", "PT4S"]]}) assert result == ast.TimeMeets( ast.Attribute("attr"), values.Interval( datetime(2000, 1, 1, 0, 0, 0, tzinfo=StaticTzInfo("Z", timedelta(0))), timedelta(seconds=4), ), ) def test_attribute_metby_dr_dt(): result = parse({"metby": [{"property": "attr"}, ["PT4S", "2000-01-01T00:00:03Z"]]}) assert result == ast.TimeMetBy( ast.Attribute("attr"), values.Interval( timedelta(seconds=4), datetime(2000, 1, 1, 0, 0, 3, tzinfo=StaticTzInfo("Z", timedelta(0))), ), ) def test_attribute_toverlaps_open_dt(): result = parse( {"toverlaps": [{"property": "attr"}, ["..", "2000-01-01T00:00:03Z"]]} ) assert result == ast.TimeOverlaps( ast.Attribute("attr"), 
values.Interval( None, datetime(2000, 1, 1, 0, 0, 3, tzinfo=StaticTzInfo("Z", timedelta(0))), ), ) def test_attribute_overlappedby_dt_open(): result = parse( {"overlappedby": [{"property": "attr"}, ["2000-01-01T00:00:03Z", ".."]]} ) assert result == ast.TimeOverlappedBy( ast.Attribute("attr"), values.Interval( datetime(2000, 1, 1, 0, 0, 3, tzinfo=StaticTzInfo("Z", timedelta(0))), None, ), ) # Array predicate def test_attribute_aequals(): result = parse({"aequals": [{"property": "arrayattr"}, [1, 2, 3]]}) assert result == ast.ArrayEquals( ast.Attribute("arrayattr"), [1, 2, 3], ) def test_attribute_aoverlaps(): result = parse({"aoverlaps": [{"property": "arrayattr"}, [1, 2, 3]]}) assert result == ast.ArrayOverlaps( ast.Attribute("arrayattr"), [1, 2, 3], ) def test_attribute_acontains(): result = parse({"acontains": [{"property": "arrayattr"}, [1, 2, 3]]}) assert result == ast.ArrayContains( ast.Attribute("arrayattr"), [1, 2, 3], ) def test_attribute_acontainedby(): result = parse({"acontainedBy": [{"property": "arrayattr"}, [1, 2, 3]]}) assert result == ast.ArrayContainedBy( ast.Attribute("arrayattr"), [1, 2, 3], ) # Spatial predicate def test_intersects_attr_point(): result = parse( { "intersects": [ {"property": "geometry"}, { "type": "Point", "coordinates": [1, 1], }, ] } ) assert result == ast.GeometryIntersects( ast.Attribute("geometry"), values.Geometry(normalize_geom(geometry.Point(1, 1).__geo_interface__)), ) def test_disjoint_linestring_attr(): result = parse( { "disjoint": [ { "type": "LineString", "coordinates": [[1, 1], [2, 2]], "bbox": [1.0, 1.0, 2.0, 2.0], }, {"property": "geometry"}, ] } ) assert result == ast.GeometryDisjoint( values.Geometry( normalize_geom(geometry.LineString([(1, 1), (2, 2)]).__geo_interface__), ), ast.Attribute("geometry"), ) def test_contains_attr_polygon(): result = parse( { "contains": [ {"property": "geometry"}, { "type": "Polygon", "coordinates": [[[1, 1], [2, 2], [0, 3], [1, 1]]], "bbox": [0.0, 1.0, 2.0, 3.0], }, ] } ) 
assert result == ast.GeometryContains( ast.Attribute("geometry"), values.Geometry( normalize_geom( geometry.Polygon([(1, 1), (2, 2), (0, 3), (1, 1)]).__geo_interface__ ), ), ) def test_within_multipolygon_attr(): result = parse( { "within": [ { "type": "MultiPolygon", "coordinates": [[[[1, 1], [2, 2], [0, 3], [1, 1]]]], "bbox": [0.0, 1.0, 2.0, 3.0], }, {"property": "geometry"}, ] } ) assert result == ast.GeometryWithin( values.Geometry( normalize_geom( geometry.MultiPolygon.from_polygons( geometry.Polygon([(1, 1), (2, 2), (0, 3), (1, 1)]) ).__geo_interface__ ), ), ast.Attribute("geometry"), ) def test_touches_attr_multilinestring(): result = parse( { "touches": [ {"property": "geometry"}, { "type": "MultiLineString", "coordinates": [[[1, 1], [2, 2]], [[0, 3], [1, 1]]], "bbox": [0.0, 1.0, 2.0, 3.0], }, ] } ) assert result == ast.GeometryTouches( ast.Attribute("geometry"), values.Geometry( normalize_geom( geometry.MultiLineString.from_linestrings( geometry.LineString([(1, 1), (2, 2)]), geometry.LineString([(0, 3), (1, 1)]), ).__geo_interface__ ), ), ) def test_crosses_attr_multilinestring(): result = parse( { "crosses": [ {"property": "geometry"}, { "type": "MultiLineString", "coordinates": [[[1, 1], [2, 2]], [[0, 3], [1, 1]]], "bbox": [0.0, 1.0, 2.0, 3.0], }, ] } ) assert result == ast.GeometryCrosses( ast.Attribute("geometry"), values.Geometry( normalize_geom( geometry.MultiLineString.from_linestrings( geometry.LineString([(1, 1), (2, 2)]), geometry.LineString([(0, 3), (1, 1)]), ).__geo_interface__ ) ), ) def test_overlaps_attr_multilinestring(): result = parse( { "overlaps": [ {"property": "geometry"}, { "type": "MultiLineString", "coordinates": [[[1, 1], [2, 2]], [[0, 3], [1, 1]]], "bbox": [0.0, 1.0, 2.0, 3.0], }, ] } ) assert result == ast.GeometryOverlaps( ast.Attribute("geometry"), values.Geometry( normalize_geom( geometry.MultiLineString.from_linestrings( geometry.LineString([(1, 1), (2, 2)]), geometry.LineString([(0, 3), (1, 1)]), ).__geo_interface__ ), ), ) 
# POINT(1 1) # LINESTRING(1 1,2 2) # MULTIPOLYGON(((1 1,2 2,0 3,1 1)) # MULTILINESTRING((1 1,2 2),(0 3,1 1)) # POLYGON((1 1,2 2,0 3,1 1)) # def test_equals_attr_geometrycollection(): # result = parse('OVERLAPS(geometry, )') # assert result == ast.SpatialPredicateNode( # ast.Attribute('geometry'), # ast.LiteralExpression( # geometry.MultiLineString([ # geometry.LineString([(1, 1), (2, 2)]), # geometry.LineString([(0, 3), (1, 1)]), # ]) # ), # 'OVERLAPS' # ) # relate # def test_relate_attr_polygon(): # result = parse('RELATE(geometry, POLYGON((1 1,2 2,0 3,1 1)), # "1*T***T**")') # assert result == ast.SpatialPatternPredicateNode( # ast.Attribute('geometry'), # ast.LiteralExpression( # geometry.Polygon([(1, 1), (2, 2), (0, 3), (1, 1)]) # ), # pattern='1*T***T**', # ) # # dwithin/beyond # def test_dwithin_attr_polygon(): # result = parse('DWITHIN(geometry, POLYGON((1 1,2 2,0 3,1 1)), 5, feet)') # print(get_repr(result)) # assert result == ast.SpatialDistancePredicateNode( # ast.Attribute('geometry'), # ast.LiteralExpression( # geometry.Polygon([(1, 1), (2, 2), (0, 3), (1, 1)]) # ), # ast.SpatialDistanceOp('DWITHIN'), # distance=5, # units='feet', # ) # def test_beyond_attr_polygon(): # result = parse( # 'BEYOND(geometry, POLYGON((1 1,2 2,0 3,1 1)), 5, nautical miles)' # ) # print(get_repr(result)) # assert result == ast.SpatialDistancePredicateNode( # ast.Attribute('geometry'), # ast.LiteralExpression( # geometry.Polygon([(1, 1), (2, 2), (0, 3), (1, 1)]) # ), # ast.SpatialDistanceOp('BEYOND'), # distance=5, # units='nautical miles', # ) # BBox prediacte # def test_bbox_simple(): # result = parse('BBOX(geometry, 1, 2, 3, 4)') # assert result == ast.BBoxPredicateNode( # ast.Attribute('geometry'), # ast.LiteralExpression(1), # ast.LiteralExpression(2), # ast.LiteralExpression(3), # ast.LiteralExpression(4), # ) # def test_bbox_crs(): # result = parse('BBOX(geometry, 1, 2, 3, 4, "EPSG:3875")') # assert result == ast.BBoxPredicateNode( # ast.Attribute('geometry'), # 
ast.LiteralExpression(1), # ast.LiteralExpression(2), # ast.LiteralExpression(3), # ast.LiteralExpression(4), # 'EPSG:3875', # ) def test_attribute_arithmetic_add(): result = parse({"eq": [{"property": "attr"}, {"+": [5, 2]}]}) assert result == ast.Equal( ast.Attribute("attr"), ast.Add( 5, 2, ), ) def test_attribute_arithmetic_sub(): result = parse({"eq": [{"property": "attr"}, {"-": [5, 2]}]}) assert result == ast.Equal( ast.Attribute("attr"), ast.Sub( 5, 2, ), ) def test_attribute_arithmetic_mul(): result = parse({"eq": [{"property": "attr"}, {"*": [5, 2]}]}) assert result == ast.Equal( ast.Attribute("attr"), ast.Mul( 5, 2, ), ) def test_attribute_arithmetic_div(): result = parse({"eq": [{"property": "attr"}, {"/": [5, 2]}]}) assert result == ast.Equal( ast.Attribute("attr"), ast.Div( 5, 2, ), ) def test_attribute_arithmetic_add_mul(): result = parse( { "eq": [ {"property": "attr"}, { "+": [ 3, {"*": [5, 2]}, ] }, ], } ) assert result == ast.Equal( ast.Attribute("attr"), ast.Add( 3, ast.Mul( 5, 2, ), ), ) def test_attribute_arithmetic_div_sub(): result = parse( { "eq": [ {"property": "attr"}, { "-": [ {"/": [3, 5]}, 2, ] }, ], } ) assert result == ast.Equal( ast.Attribute("attr"), ast.Sub( ast.Div( 3, 5, ), 2, ), ) def test_attribute_arithmetic_div_sub_bracketted(): result = parse( { "eq": [ {"property": "attr"}, { "/": [ 3, {"-": [5, 2]}, ] }, ], } ) assert result == ast.Equal( ast.Attribute("attr"), ast.Div( 3, ast.Sub( 5, 2, ), ), ) # test function expression parsing def test_function_no_arg(): result = parse( { "eq": [ {"property": "attr"}, {"function": {"name": "myfunc", "arguments": []}}, ] } ) assert result == ast.Equal( ast.Attribute("attr"), ast.Function("myfunc", []), ) def test_function_single_arg(): result = parse( { "eq": [ {"property": "attr"}, {"function": {"name": "myfunc", "arguments": [1]}}, ] } ) assert result == ast.Equal( ast.Attribute("attr"), ast.Function( "myfunc", [1], ), ) def test_function_attr_string_arg(): result = parse( { "eq": [ 
{"property": "attr"}, { "function": { "name": "myfunc", "arguments": [{"property": "other_attr"}, "abc"], } }, ] } ) assert result == ast.Equal( ast.Attribute("attr"), ast.Function( "myfunc", [ ast.Attribute("other_attr"), "abc", ], ), ) pygeofilter-0.3.1/tests/parsers/ecql/000077500000000000000000000000001473475122500176235ustar00rootroot00000000000000pygeofilter-0.3.1/tests/parsers/ecql/__init__.py000066400000000000000000000000001473475122500217220ustar00rootroot00000000000000pygeofilter-0.3.1/tests/parsers/ecql/test_parser.py000066400000000000000000000362631473475122500225420ustar00rootroot00000000000000# ------------------------------------------------------------------------------ # # Project: pygeofilter # Authors: Fabian Schindler # # ------------------------------------------------------------------------------ # Copyright (C) 2019 EOX IT Services GmbH # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies of this Software or works derived from this Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. 
# ------------------------------------------------------------------------------ from datetime import datetime, timedelta from dateparser.timezone_parser import StaticTzInfo from pygeoif import geometry from pygeofilter import ast, values from pygeofilter.parsers.ecql import parse def test_namespace_attribute_eq_literal(): result = parse("ns:attr = 'A'") assert result == ast.Equal( ast.Attribute("ns:attr"), "A", ) def test_prefixed_attribute_eq_literal(): result = parse("properties.ns:attr = 'A'") assert result == ast.Equal( ast.Attribute("properties.ns:attr"), "A", ) def test_attribute_eq_literal(): result = parse("attr = 'A'") assert result == ast.Equal( ast.Attribute("attr"), "A", ) def test_attribute_lt_literal(): result = parse("attr < 5") assert result == ast.LessThan( ast.Attribute("attr"), 5.0, ) def test_attribute_lte_literal(): result = parse("attr <= 5") assert result == ast.LessEqual( ast.Attribute("attr"), 5.0, ) def test_attribute_gt_literal(): result = parse("attr > 5") assert result == ast.GreaterThan( ast.Attribute("attr"), 5.0, ) def test_attribute_gte_literal(): result = parse("attr >= 5") assert result == ast.GreaterEqual( ast.Attribute("attr"), 5.0, ) def test_attribute_ne_literal(): result = parse("attr <> 5") assert result == ast.NotEqual( ast.Attribute("attr"), 5, ) def test_attribute_between(): result = parse("attr BETWEEN 2 AND 5") assert result == ast.Between( ast.Attribute("attr"), 2, 5, False, ) def test_attribute_not_between(): result = parse("attr NOT BETWEEN 2 AND 5") assert result == ast.Between( ast.Attribute("attr"), 2, 5, True, ) def test_attribute_between_negative_positive(): result = parse("attr BETWEEN -1 AND 1") assert result == ast.Between( ast.Attribute("attr"), -1, 1, False, ) def test_string_like(): result = parse("attr LIKE 'some%'") assert result == ast.Like( ast.Attribute("attr"), "some%", nocase=False, not_=False, wildcard="%", singlechar=".", escapechar="\\", ) def test_string_ilike(): result = parse("attr ILIKE 
'some%'") assert result == ast.Like( ast.Attribute("attr"), "some%", nocase=True, not_=False, wildcard="%", singlechar=".", escapechar="\\", ) def test_string_not_like(): result = parse("attr NOT LIKE 'some%'") assert result == ast.Like( ast.Attribute("attr"), "some%", nocase=False, not_=True, wildcard="%", singlechar=".", escapechar="\\", ) def test_string_not_ilike(): result = parse("attr NOT ILIKE 'some%'") assert result == ast.Like( ast.Attribute("attr"), "some%", nocase=True, not_=True, wildcard="%", singlechar=".", escapechar="\\", ) def test_attribute_in_list(): result = parse("attr IN (1, 2, 3, 4)") assert result == ast.In( ast.Attribute("attr"), [ 1, 2, 3, 4, ], False, ) def test_attribute_not_in_list(): result = parse("attr NOT IN ('A', 'B', 'C', 'D')") assert result == ast.In( ast.Attribute("attr"), [ "A", "B", "C", "D", ], True, ) def test_attribute_is_null(): result = parse("attr IS NULL") assert result == ast.IsNull(ast.Attribute("attr"), False) def test_attribute_is_not_null(): result = parse("attr IS NOT NULL") assert result == ast.IsNull(ast.Attribute("attr"), True) def test_attribute_exists(): result = parse("attr EXISTS") assert result == ast.Exists(ast.Attribute("attr"), False) def test_attribute_does_not_exist(): result = parse("attr DOES-NOT-EXIST") assert result == ast.Exists(ast.Attribute("attr"), True) def test_include(): result = parse("INCLUDE") assert result == ast.Include(False) def test_exclude(): result = parse("EXCLUDE") assert result == ast.Include(True) # Temporal predicate def test_attribute_before(): result = parse("attr BEFORE 2000-01-01T00:00:01Z") assert result == ast.TimeBefore( ast.Attribute("attr"), datetime(2000, 1, 1, 0, 0, 1, tzinfo=StaticTzInfo("Z", timedelta(0))), ) def test_attribute_before_or_during_dt_dt(): result = parse("attr BEFORE OR DURING 2000-01-01T00:00:00Z / 2000-01-01T00:00:01Z") assert result == ast.TimeBeforeOrDuring( ast.Attribute("attr"), values.Interval( datetime(2000, 1, 1, 0, 0, 0, 
tzinfo=StaticTzInfo("Z", timedelta(0))), datetime(2000, 1, 1, 0, 0, 1, tzinfo=StaticTzInfo("Z", timedelta(0))), ), ) def test_attribute_before_or_during_dt_dr(): result = parse("attr BEFORE OR DURING 2000-01-01T00:00:00Z / PT4S") assert result == ast.TimeBeforeOrDuring( ast.Attribute("attr"), values.Interval( datetime(2000, 1, 1, 0, 0, 0, tzinfo=StaticTzInfo("Z", timedelta(0))), timedelta(seconds=4), ), ) def test_attribute_before_or_during_dr_dt(): result = parse("attr BEFORE OR DURING PT4S / 2000-01-01T00:00:03Z") assert result == ast.TimeBeforeOrDuring( ast.Attribute("attr"), values.Interval( timedelta(seconds=4), datetime(2000, 1, 1, 0, 0, 3, tzinfo=StaticTzInfo("Z", timedelta(0))), ), ) # Spatial predicate def test_intersects_attr_point(): result = parse("INTERSECTS(geometry, POINT(1 1))") assert result == ast.GeometryIntersects( ast.Attribute("geometry"), values.Geometry(geometry.Point(1, 1).__geo_interface__), ) def test_disjoint_linestring_attr(): result = parse("DISJOINT(LINESTRING(1 1,2 2), geometry)") assert result == ast.GeometryDisjoint( values.Geometry( geometry.LineString([(1, 1), (2, 2)]).__geo_interface__, ), ast.Attribute("geometry"), ) def test_contains_attr_polygon(): result = parse("CONTAINS(geometry, POLYGON((1 1,2 2,0 3,1 1)))") assert result == ast.GeometryContains( ast.Attribute("geometry"), values.Geometry( geometry.Polygon([(1, 1), (2, 2), (0, 3), (1, 1)]).__geo_interface__, ), ) def test_within_multipolygon_attr(): result = parse("WITHIN(MULTIPOLYGON(((1 1,2 2,0 3,1 1))), geometry)") assert result == ast.GeometryWithin( values.Geometry( geometry.MultiPolygon.from_polygons( geometry.Polygon([(1, 1), (2, 2), (0, 3), (1, 1)]) ).__geo_interface__, ), ast.Attribute("geometry"), ) def test_touches_attr_multilinestring(): result = parse("TOUCHES(geometry, MULTILINESTRING((1 1,2 2),(0 3,1 1)))") assert result == ast.GeometryTouches( ast.Attribute("geometry"), values.Geometry( geometry.MultiLineString.from_linestrings( geometry.LineString([(1, 
1), (2, 2)]), geometry.LineString([(0, 3), (1, 1)]), ).__geo_interface__, ), ) def test_crosses_attr_multilinestring(): result = parse("CROSSES(geometry, MULTILINESTRING((1 1,2 2),(0 3,1 1)))") assert result == ast.GeometryCrosses( ast.Attribute("geometry"), values.Geometry( geometry.MultiLineString.from_linestrings( geometry.LineString([(1, 1), (2, 2)]), geometry.LineString([(0, 3), (1, 1)]), ).__geo_interface__, ), ) def test_overlaps_attr_multilinestring(): result = parse("OVERLAPS(geometry, MULTILINESTRING((1 1,2 2),(0 3,1 1)))") assert result == ast.GeometryOverlaps( ast.Attribute("geometry"), values.Geometry( geometry.MultiLineString.from_linestrings( geometry.LineString([(1, 1), (2, 2)]), geometry.LineString([(0, 3), (1, 1)]), ).__geo_interface__, ), ) def test_intersects_attr_point_ewkt(): result = parse("INTERSECTS(geometry, SRID=4326;POINT(1 1))") assert ( result.rhs.geometry["crs"]["properties"]["name"] == "urn:ogc:def:crs:EPSG::4326" ) assert result == ast.GeometryIntersects( ast.Attribute("geometry"), values.Geometry(geometry.Point(1, 1).__geo_interface__), ) def test_intersects_attr_geometrycollection(): result = parse( "INTERSECTS(geometry, GEOMETRYCOLLECTION(POINT(1 1)," "LINESTRING(1 1,2 2)," "POLYGON((1 1,2 2,0 3,1 1))" "))" ) assert result == ast.GeometryIntersects( ast.Attribute("geometry"), values.Geometry( geometry.GeometryCollection( [ geometry.Point(1, 1), geometry.LineString([(1, 1), (2, 2)]), geometry.Polygon([(1, 1), (2, 2), (0, 3), (1, 1)]), ] ).__geo_interface__ ), ) # relate def test_relate_attr_polygon(): result = parse("RELATE(geometry, POLYGON((1 1,2 2,0 3,1 1)), '1*T***T**')") assert result == ast.Relate( ast.Attribute("geometry"), values.Geometry( geometry.Polygon([(1, 1), (2, 2), (0, 3), (1, 1)]).__geo_interface__, ), pattern="1*T***T**", ) # dwithin/beyond def test_dwithin_attr_polygon(): result = parse("DWITHIN(geometry, POLYGON((1 1,2 2,0 3,1 1)), 5, feet)") assert result == ast.DistanceWithin( ast.Attribute("geometry"), 
values.Geometry( geometry.Polygon([(1, 1), (2, 2), (0, 3), (1, 1)]).__geo_interface__, ), distance=5, units="feet", ) def test_beyond_attr_polygon(): result = parse("BEYOND(geometry, POLYGON((1 1,2 2,0 3,1 1)), 5, nautical miles)") assert result == ast.DistanceBeyond( ast.Attribute("geometry"), values.Geometry( geometry.Polygon([(1, 1), (2, 2), (0, 3), (1, 1)]).__geo_interface__, ), distance=5, units="nautical miles", ) # BBox prediacte def test_bbox_simple(): result = parse("BBOX(geometry, 1, 2, 3, 4)") assert result == ast.BBox( ast.Attribute("geometry"), 1, 2, 3, 4, ) def test_bbox_crs(): result = parse("BBOX(geometry, 1, 2, 3, 4, 'EPSG:3875')") assert result == ast.BBox( ast.Attribute("geometry"), 1, 2, 3, 4, "EPSG:3875", ) def test_bbox_negative(): result = parse("BBOX(geometry, -3, -4, -1, -2, 'EPSG:3875')") assert result == ast.BBox( ast.Attribute("geometry"), -3, -4, -1, -2, "EPSG:3875", ) def test_attribute_arithmetic_add(): result = parse("attr = 5 + 2") assert result == ast.Equal( ast.Attribute("attr"), ast.Add( 5, 2, ), ) def test_attribute_arithmetic_sub(): result = parse("attr = 5 - 2") assert result == ast.Equal( ast.Attribute("attr"), ast.Sub( 5, 2, ), ) def test_attribute_arithmetic_mul(): result = parse("attr = 5 * 2") assert result == ast.Equal( ast.Attribute("attr"), ast.Mul( 5, 2, ), ) def test_attribute_arithmetic_div(): result = parse("attr = 5 / 2") assert result == ast.Equal( ast.Attribute("attr"), ast.Div( 5, 2, ), ) def test_attribute_arithmetic_add_mul(): result = parse("attr = 3 + 5 * 2") assert result == ast.Equal( ast.Attribute("attr"), ast.Add( 3, ast.Mul( 5, 2, ), ), ) def test_attribute_arithmetic_div_sub(): result = parse("attr = 3 / 5 - 2") assert result == ast.Equal( ast.Attribute("attr"), ast.Sub( ast.Div( 3, 5, ), 2, ), ) def test_attribute_arithmetic_div_sub_bracketted(): result = parse("attr = 3 / (5 - 2)") assert result == ast.Equal( ast.Attribute("attr"), ast.Div( 3, ast.Sub( 5, 2, ), ), ) # test function expression 
parsing def test_function_no_arg(): result = parse("attr = myfunc()") assert result == ast.Equal( ast.Attribute("attr"), ast.Function("myfunc", []), ) def test_function_single_arg(): result = parse("attr = myfunc(1)") assert result == ast.Equal( ast.Attribute("attr"), ast.Function( "myfunc", [ 1, ], ), ) def test_function_attr_string_arg(): result = parse("attr = myfunc(other_attr, 'abc')") assert result == ast.Equal( ast.Attribute("attr"), ast.Function( "myfunc", [ ast.Attribute("other_attr"), "abc", ], ), ) def test_attribute_eq_true_uppercase(): result = parse("attr = TRUE") assert result == ast.Equal( ast.Attribute("attr"), True, ) def test_attribute_eq_true_lowercase(): result = parse("attr = true") assert result == ast.Equal( ast.Attribute("attr"), True, ) def test_attribute_eq_false_uppercase(): result = parse("attr = FALSE") assert result == ast.Equal( ast.Attribute("attr"), False, ) def test_attribute_eq_false_lowercase(): result = parse("attr = false") assert result == ast.Equal( ast.Attribute("attr"), False, ) pygeofilter-0.3.1/tests/parsers/fes/000077500000000000000000000000001473475122500174545ustar00rootroot00000000000000pygeofilter-0.3.1/tests/parsers/fes/__init__.py000066400000000000000000000000001473475122500215530ustar00rootroot00000000000000pygeofilter-0.3.1/tests/parsers/fes/test_v11.py000066400000000000000000000337611473475122500215060ustar00rootroot00000000000000from pygeofilter import ast, values from pygeofilter.parsers.fes.v11 import parse def test_and(): result = parse( """ attr 30 attr 10 """ ) assert result == ast.And( ast.LessThan( ast.Attribute("attr"), 30, ), ast.GreaterThan( ast.Attribute("attr"), 10, ), ) def test_or(): result = parse( """ attr 30.5 attr 10.5 """ ) assert result == ast.Or( ast.LessEqual( ast.Attribute("attr"), 30.5, ), ast.GreaterEqual( ast.Attribute("attr"), 10.5, ), ) def test_not(): result = parse( """ attr value """ ) assert result == ast.Not( ast.Equal( ast.Attribute("attr"), "value", ), ) def test_not_equal(): 
result = parse( """ attr value """ ) assert result == ast.NotEqual( ast.Attribute("attr"), "value", ) def test_is_like(): result = parse( """ attr some% """ ) assert result == ast.Like( ast.Attribute("attr"), "some%", nocase=False, not_=False, wildcard="%", singlechar=".", escapechar="\\", ) # case insensitive result = parse( """ attr some% """ ) assert result == ast.Like( ast.Attribute("attr"), "some%", nocase=True, not_=False, wildcard="%", singlechar=".", escapechar="\\", ) def test_is_null(): result = parse( """ attr """ ) assert result == ast.IsNull( ast.Attribute("attr"), not_=False, ) def test_is_between(): result = parse( """ attr 10.5 11.5 """ ) assert result == ast.Between( ast.Attribute("attr"), 10.5, 11.5, not_=False, ) def test_geom_equals(): result = parse( """ attr 1.0 1.0 """ ) assert result == ast.GeometryEquals( ast.Attribute("attr"), values.Geometry( { "type": "Point", "coordinates": (1.0, 1.0), "crs": { "type": "name", "properties": { "name": "http://www.opengis.net/def/crs/epsg/0/4326" }, }, } ), ) def test_geom_disjoint(): result = parse( """ attr 1.0 1.0 2.0 2.0 """ ) assert result == ast.GeometryDisjoint( ast.Attribute("attr"), values.Geometry( { "type": "LineString", "coordinates": [ (1.0, 1.0), (2.0, 2.0), ], } ), ) def test_geom_touches(): result = parse( """ attr 0.0 0.0 1.0 0.0 0.0 1.0 0.0 0.0 0.2 0.2 0.5 0.2 0.2 0.5 0.2 0.2 """ ) assert result == ast.GeometryTouches( ast.Attribute("attr"), values.Geometry( { "type": "Polygon", "coordinates": [ [(0.0, 0.0), (1.0, 0.0), (0.0, 1.0), (0.0, 0.0)], [(0.2, 0.2), (0.5, 0.2), (0.2, 0.5), (0.2, 0.2)], ], } ), ) def test_geom_within(): result = parse( """ attr 0.0 1.0 2.0 3.0 """ ) assert result == ast.GeometryWithin( ast.Attribute("attr"), values.Geometry( { "type": "Polygon", "coordinates": [ [ (0.0, 1.0), (0.0, 3.0), (2.0, 3.0), (2.0, 1.0), (0.0, 1.0), ], ], } ), ) def test_geom_overlaps(): result = parse( """ attr 0.0 0.0 1.0 0.0 0.0 1.0 0.0 0.0 0.2 0.2 0.5 0.2 0.2 0.5 0.2 0.2 10.0 10.0 11.0 
10.0 10.0 11.0 10.0 10.0 10.2 10.2 10.5 10.2 10.2 10.5 10.2 10.2 """ ) assert result == ast.GeometryOverlaps( ast.Attribute("attr"), values.Geometry( { "type": "MultiPolygon", "coordinates": [ [ [(0.0, 0.0), (1.0, 0.0), (0.0, 1.0), (0.0, 0.0)], [(0.2, 0.2), (0.5, 0.2), (0.2, 0.5), (0.2, 0.2)], ], [ [(10.0, 10.0), (11.0, 10.0), (10.0, 11.0), (10.0, 10.0)], [(10.2, 10.2), (10.5, 10.2), (10.2, 10.5), (10.2, 10.2)], ], ], } ), ) def test_geom_crosses(): result = parse( """ attr 1.0 2.0 2.0 1.0 """ ) assert result == ast.GeometryCrosses( ast.Attribute("attr"), values.Geometry( {"type": "LineString", "coordinates": [(2.0, 1.0), (1.0, 2.0)]} ), ) def test_geom_intersects(): result = parse( """ attr 1.0 0.5 2.0 1.5 """ ) assert result == ast.GeometryIntersects( ast.Attribute("attr"), values.Geometry( { "type": "Polygon", "bbox": (0.5, 1.0, 1.5, 2.0), "coordinates": [ [(0.5, 1.0), (0.5, 2.0), (1.5, 2.0), (1.5, 1.0), (0.5, 1.0)] ], } ), ) def test_geom_contains(): result = parse( """ attr 1.0 0.5 2.0 0.5 2.0 1.5 1.0 1.5 1.0 0.5 """ ) assert result == ast.GeometryContains( ast.Attribute("attr"), values.Geometry( { "type": "Polygon", "coordinates": [ [(0.5, 1.0), (0.5, 2.0), (1.5, 2.0), (1.5, 1.0), (0.5, 1.0)] ], } ), ) def test_geom_dwithin(): result = parse( """ attr 1.0 1.0 10 """ ) assert result == ast.DistanceWithin( ast.Attribute("attr"), values.Geometry( { "type": "Point", "coordinates": (1.0, 1.0), } ), distance=10, units="m", ) pygeofilter-0.3.1/tests/parsers/fes/test_v20.py000066400000000000000000000405741473475122500215060ustar00rootroot00000000000000from datetime import datetime, timedelta from dateparser.timezone_parser import StaticTzInfo from pygeofilter import ast, values from pygeofilter.parsers.fes.v20 import parse def test_and(): result = parse( """ attr 30 attr 10 """ ) assert result == ast.And( ast.LessThan( ast.Attribute("attr"), 30, ), ast.GreaterThan( ast.Attribute("attr"), 10, ), ) def test_or(): result = parse( """ attr 30.5 attr 10.5 """ ) assert 
result == ast.Or( ast.LessEqual( ast.Attribute("attr"), 30.5, ), ast.GreaterEqual( ast.Attribute("attr"), 10.5, ), ) def test_not(): result = parse( """ attr value """ ) assert result == ast.Not( ast.Equal( ast.Attribute("attr"), "value", ), ) def test_not_equal(): result = parse( """ attr value """ ) assert result == ast.NotEqual( ast.Attribute("attr"), "value", ) def test_is_like(): result = parse( """ attr some% """ ) assert result == ast.Like( ast.Attribute("attr"), "some%", nocase=False, not_=False, wildcard="%", singlechar=".", escapechar="\\", ) # case insensitive result = parse( """ attr some% """ ) assert result == ast.Like( ast.Attribute("attr"), "some%", nocase=True, not_=False, wildcard="%", singlechar=".", escapechar="\\", ) def test_is_null(): result = parse( """ attr """ ) assert result == ast.IsNull( ast.Attribute("attr"), not_=False, ) def test_is_between(): result = parse( """ attr 10.5 11.5 """ ) assert result == ast.Between( ast.Attribute("attr"), 10.5, 11.5, not_=False, ) def test_geom_equals(): result = parse( """ attr 1.0 1.0 """ ) assert result == ast.GeometryEquals( ast.Attribute("attr"), values.Geometry( { "type": "Point", "coordinates": (1.0, 1.0), "crs": { "type": "name", "properties": { "name": "http://www.opengis.net/def/crs/epsg/0/4326" }, }, } ), ) def test_geom_disjoint(): result = parse( """ attr 1.0 1.0 2.0 2.0 """ ) assert result == ast.GeometryDisjoint( ast.Attribute("attr"), values.Geometry( { "type": "LineString", "coordinates": [ (1.0, 1.0), (2.0, 2.0), ], } ), ) def test_geom_touches(): result = parse( """ attr 0.0 0.0 1.0 0.0 0.0 1.0 0.0 0.0 0.2 0.2 0.5 0.2 0.2 0.5 0.2 0.2 """ ) assert result == ast.GeometryTouches( ast.Attribute("attr"), values.Geometry( { "type": "Polygon", "coordinates": [ [(0.0, 0.0), (1.0, 0.0), (0.0, 1.0), (0.0, 0.0)], [(0.2, 0.2), (0.5, 0.2), (0.2, 0.5), (0.2, 0.2)], ], } ), ) def test_geom_within(): result = parse( """ attr 0.0 1.0 2.0 3.0 """ ) assert result == ast.GeometryWithin( 
ast.Attribute("attr"), values.Geometry( { "type": "Polygon", "coordinates": [ [ (0.0, 1.0), (0.0, 3.0), (2.0, 3.0), (2.0, 1.0), (0.0, 1.0), ], ], } ), ) def test_geom_overlaps(): result = parse( """ attr 0.0 0.0 1.0 0.0 0.0 1.0 0.0 0.0 0.2 0.2 0.5 0.2 0.2 0.5 0.2 0.2 10.0 10.0 11.0 10.0 10.0 11.0 10.0 10.0 10.2 10.2 10.5 10.2 10.2 10.5 10.2 10.2 """ ) assert result == ast.GeometryOverlaps( ast.Attribute("attr"), values.Geometry( { "type": "MultiPolygon", "coordinates": [ [ [(0.0, 0.0), (1.0, 0.0), (0.0, 1.0), (0.0, 0.0)], [(0.2, 0.2), (0.5, 0.2), (0.2, 0.5), (0.2, 0.2)], ], [ [(10.0, 10.0), (11.0, 10.0), (10.0, 11.0), (10.0, 10.0)], [(10.2, 10.2), (10.5, 10.2), (10.2, 10.5), (10.2, 10.2)], ], ], } ), ) def test_geom_crosses(): result = parse( """ attr 1.0 2.0 2.0 1.0 """ ) assert result == ast.GeometryCrosses( ast.Attribute("attr"), values.Geometry( {"type": "LineString", "coordinates": [(2.0, 1.0), (1.0, 2.0)]} ), ) def test_geom_intersects(): result = parse( """ attr 1.0 0.5 2.0 1.5 """ ) assert result == ast.GeometryIntersects( ast.Attribute("attr"), values.Geometry( { "type": "Polygon", "bbox": (0.5, 1.0, 1.5, 2.0), "coordinates": [ [(0.5, 1.0), (0.5, 2.0), (1.5, 2.0), (1.5, 1.0), (0.5, 1.0)] ], } ), ) def test_geom_contains(): result = parse( """ attr 1.0 0.5 2.0 0.5 2.0 1.5 1.0 1.5 1.0 0.5 """ ) assert result == ast.GeometryContains( ast.Attribute("attr"), values.Geometry( { "type": "Polygon", "coordinates": [ [(0.5, 1.0), (0.5, 2.0), (1.5, 2.0), (1.5, 1.0), (0.5, 1.0)] ], } ), ) def test_geom_dwithin(): result = parse( """ attr 1.0 1.0 10 """ ) assert result == ast.DistanceWithin( ast.Attribute("attr"), values.Geometry( { "type": "Point", "coordinates": (1.0, 1.0), } ), distance=10, units="m", ) def test_after(): result = parse( """ attr 2000-01-01T00:00:00Z """ ) assert result == ast.TimeAfter( ast.Attribute("attr"), datetime(2000, 1, 1, 0, 0, 0, tzinfo=StaticTzInfo("Z", timedelta(0))), ) def test_before(): # using timePosition directly result = parse( """ 
attr 2000-01-01T00:00:00Z """ ) assert result == ast.TimeBefore( ast.Attribute("attr"), datetime(2000, 1, 1, 0, 0, 0, tzinfo=StaticTzInfo("Z", timedelta(0))), ) def test_begins(): # using timePosition directly result = parse( """ attr 2000-01-01T00:00:00Z 2001-01-01T00:00:00Z """ ) assert result == ast.TimeBegins( ast.Attribute("attr"), values.Interval( datetime(2000, 1, 1, 0, 0, 0, tzinfo=StaticTzInfo("Z", timedelta(0))), datetime(2001, 1, 1, 0, 0, 0, tzinfo=StaticTzInfo("Z", timedelta(0))), ), ) pygeofilter-0.3.1/tests/parsers/jfe/000077500000000000000000000000001473475122500174435ustar00rootroot00000000000000pygeofilter-0.3.1/tests/parsers/jfe/__init__.py000066400000000000000000000000001473475122500215420ustar00rootroot00000000000000pygeofilter-0.3.1/tests/parsers/jfe/test_parser.py000066400000000000000000000243241473475122500223550ustar00rootroot00000000000000# ------------------------------------------------------------------------------ # # Project: pygeofilter # Authors: Fabian Schindler # # ------------------------------------------------------------------------------ # Copyright (C) 2021 EOX IT Services GmbH # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies of this Software or works derived from this Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. # ------------------------------------------------------------------------------ import json from datetime import datetime, timedelta from dateparser.timezone_parser import StaticTzInfo from pygeoif import geometry from pygeofilter import ast, values from pygeofilter.parsers.jfe import parse def normalize_geom(geometry): if hasattr(geometry, "__geo_interface__"): geometry = geometry.__geo_interface__ return json.loads(json.dumps(geometry)) def test_attribute_eq_literal(): result = parse('["==", ["get", "attr"], "A"]') assert result == ast.Equal( ast.Attribute("attr"), "A", ) def test_attribute_lt_literal(): result = parse('["<", ["get", "attr"], 5]') assert result == ast.LessThan( ast.Attribute("attr"), 5.0, ) def test_attribute_lte_literal(): result = parse('["<=", ["get", "attr"], 5]') assert result == ast.LessEqual( ast.Attribute("attr"), 5.0, ) def test_attribute_gt_literal(): result = parse('[">", ["get", "attr"], 5]') assert result == ast.GreaterThan( ast.Attribute("attr"), 5.0, ) def test_attribute_gte_literal(): result = parse('[">=", ["get", "attr"], 5]') assert result == ast.GreaterEqual( ast.Attribute("attr"), 5.0, ) def test_attribute_ne_literal(): result = parse('["!=", ["get", "attr"], 5]') assert result == ast.NotEqual( ast.Attribute("attr"), 5.0, ) def test_string_like(): result = parse(["like", ["get", "attr"], "some%"]) assert result == ast.Like( ast.Attribute("attr"), "some%", nocase=False, wildcard="%", singlechar=".", escapechar="\\", not_=False, ) def test_string_like_wildcard(): result = parse(["like", ["get", "attr"], "some*", {"wildCard": "*"}]) assert result == ast.Like( ast.Attribute("attr"), "some*", nocase=False, wildcard="*", singlechar=".", escapechar="\\", not_=False, ) 
def test_attribute_in_list(): result = parse(["in", ["get", "attr"], 1, 2, 3, 4]) assert result == ast.In( ast.Attribute("attr"), [ 1, 2, 3, 4, ], False, ) def test_id_in_list(): result = parse(["in", ["id"], "someID", "anotherID"]) assert result == ast.In(ast.Attribute("id"), ["someID", "anotherID"], False) def test_attribute_before(): result = parse(["before", ["get", "attr"], "2000-01-01T00:00:01Z"]) assert result == ast.TimeBefore( ast.Attribute("attr"), datetime(2000, 1, 1, 0, 0, 1, tzinfo=StaticTzInfo("Z", timedelta(0))), ) def test_attribute_after_dt_dt(): result = parse( ["after", ["get", "attr"], "2000-01-01T00:00:00Z", "2000-01-01T00:00:01Z"] ) assert result == ast.TimeAfter( ast.Attribute("attr"), values.Interval( datetime(2000, 1, 1, 0, 0, 0, tzinfo=StaticTzInfo("Z", timedelta(0))), datetime(2000, 1, 1, 0, 0, 1, tzinfo=StaticTzInfo("Z", timedelta(0))), ), ) def test_attribute_during_dt_dt(): result = parse( ["during", ["get", "attr"], "2000-01-01T00:00:00Z", "2000-01-01T00:00:01Z"] ) assert result == ast.TimeDuring( ast.Attribute("attr"), values.Interval( datetime(2000, 1, 1, 0, 0, 0, tzinfo=StaticTzInfo("Z", timedelta(0))), datetime(2000, 1, 1, 0, 0, 1, tzinfo=StaticTzInfo("Z", timedelta(0))), ), ) def test_intersects_attr_point(): result = parse( [ "intersects", ["geometry"], { "type": "Point", "coordinates": [1, 1], }, ] ) assert result == ast.GeometryIntersects( ast.Attribute("geometry"), values.Geometry(normalize_geom(geometry.Point(1, 1).__geo_interface__)), ) def test_within_multipolygon_attr(): result = parse( [ "within", { "type": "MultiPolygon", "coordinates": [[[[1, 1], [2, 2], [0, 3], [1, 1]]]], "bbox": [0.0, 1.0, 2.0, 3.0], }, ["geometry"], ] ) assert result == ast.GeometryWithin( values.Geometry( normalize_geom( geometry.MultiPolygon.from_polygons( geometry.Polygon([(1, 1), (2, 2), (0, 3), (1, 1)]) ).__geo_interface__ ), ), ast.Attribute("geometry"), ) def test_logical_all(): result = parse( [ "all", [">", ["get", "height"], 50], ["==", 
["get", "type"], "commercial"], ["get", "occupied"], ] ) assert result == ast.And( ast.And( ast.GreaterThan( ast.Attribute("height"), 50, ), ast.Equal(ast.Attribute("type"), "commercial"), ), ast.Attribute("occupied"), ) def test_logical_any(): result = parse(["any", ["<", ["get", "height"], 50], ["!", ["get", "occupied"]]]) assert result == ast.Or( ast.LessThan( ast.Attribute("height"), 50, ), ast.Not(ast.Attribute("occupied")), ) def test_attribute_arithmetic_add(): result = parse(["==", ["get", "attr"], ["+", 5, 2]]) assert result == ast.Equal( ast.Attribute("attr"), ast.Add( 5, 2, ), ) def test_attribute_arithmetic_sub(): result = parse(["==", ["get", "attr"], ["-", 5, 2]]) assert result == ast.Equal( ast.Attribute("attr"), ast.Sub( 5, 2, ), ) def test_attribute_arithmetic_mul(): result = parse(["==", ["get", "attr"], ["*", 5, 2]]) assert result == ast.Equal( ast.Attribute("attr"), ast.Mul( 5, 2, ), ) def test_attribute_arithmetic_div(): result = parse(["==", ["get", "attr"], ["/", 5, 2]]) assert result == ast.Equal( ast.Attribute("attr"), ast.Div( 5, 2, ), ) def test_attribute_arithmetic_add_mul(): result = parse(["==", ["get", "attr"], ["+", 3, ["*", 5, 2]]]) assert result == ast.Equal( ast.Attribute("attr"), ast.Add( 3, ast.Mul( 5, 2, ), ), ) def test_attribute_arithmetic_div_sub(): result = parse(["==", ["get", "attr"], ["-", ["/", 3, 5], 2]]) assert result == ast.Equal( ast.Attribute("attr"), ast.Sub( ast.Div( 3, 5, ), 2, ), ) def test_attribute_arithmetic_div_sub_bracketted(): result = parse(["==", ["get", "attr"], ["/", 3, ["-", 5, 2]]]) assert result == ast.Equal( ast.Attribute("attr"), ast.Div( 3, ast.Sub( 5, 2, ), ), ) def test_arithmetic_modulo(): result = parse(["==", ["get", "attr"], ["%", 3, 7]]) assert result == ast.Equal( ast.Attribute("attr"), ast.Function( "mod", [3, 7], ), ) def test_arithmetic_floor(): result = parse(["==", ["floor", ["get", "age"]], 42]) assert result == ast.Equal( ast.Function( "floor", [ ast.Attribute("age"), ], ), 42, ) 
def test_arithmetic_ceil(): result = parse(["==", ["ceil", ["get", "age"]], 42]) assert result == ast.Equal( ast.Function( "ceil", [ ast.Attribute("age"), ], ), 42, ) def test_arithmetic_abs(): result = parse([">", ["abs", ["get", "delta"]], 1]) assert result == ast.GreaterThan( ast.Function( "abs", [ ast.Attribute("delta"), ], ), 1, ) def test_arithmetic_pow(): result = parse([">", ["^", ["get", "size"], 2], 100]) assert result == ast.GreaterThan( ast.Function( "pow", [ast.Attribute("size"), 2], ), 100, ) def test_arithmetic_min(): result = parse([">", ["min", ["get", "wins"], ["get", "ties"]], 10]) assert result == ast.GreaterThan( ast.Function( "min", [ ast.Attribute("wins"), ast.Attribute("ties"), ], ), 10, ) def test_arithmetic_max(): result = parse([">", ["max", ["get", "wins"], ["get", "ties"]], 10]) assert result == ast.GreaterThan( ast.Function( "max", [ ast.Attribute("wins"), ast.Attribute("ties"), ], ), 10, ) pygeofilter-0.3.1/tests/test_geopandas/000077500000000000000000000000001473475122500202205ustar00rootroot00000000000000pygeofilter-0.3.1/tests/test_geopandas/__init__.py000066400000000000000000000000001473475122500223170ustar00rootroot00000000000000pygeofilter-0.3.1/tests/test_geopandas/test_evaluate.py000066400000000000000000000111571473475122500234440ustar00rootroot00000000000000from datetime import date, datetime import geopandas import numpy as np import pytest from shapely.geometry import Point from pygeofilter.backends.geopandas.evaluate import to_filter from pygeofilter.parsers.ecql import parse @pytest.fixture def data(): return geopandas.GeoDataFrame( { "str_attr": ["this is a test", "this is another test"], "maybe_str_attr": [None, "not null"], "int_attr": [5, 8], "float_attr": [5.5, 8.5], "date_attr": [date(2010, 1, 1), date(2010, 1, 10)], "datetime_attr": [datetime(2010, 1, 1), datetime(2010, 1, 10)], "point_attr": geopandas.GeoSeries([Point(1, 1), Point(2, 2)]), } ) def filter_(ast, data): function_map = { "sin": np.sin, } return 
data[to_filter(data, ast, {}, function_map)] def test_comparison(data): result = filter_(parse("int_attr = 5"), data) assert len(result) == 1 and result.index[0] == 0 result = filter_(parse("int_attr < 6"), data) assert len(result) == 1 and result.index[0] == 0 result = filter_(parse("int_attr > 6"), data) assert len(result) == 1 and result.index[0] == 1 result = filter_(parse("int_attr <= 5"), data) assert len(result) == 1 and result.index[0] == 0 result = filter_(parse("int_attr >= 8"), data) assert len(result) == 1 and result.index[0] == 1 result = filter_(parse("int_attr <> 5"), data) assert len(result) == 1 and result.index[0] == 1 def test_combination(data): result = filter_(parse("int_attr = 5 AND float_attr < 6.0"), data) assert len(result) == 1 and result.index[0] == 0 result = filter_(parse("int_attr = 5 AND float_attr < 6.0"), data) assert len(result) == 1 and result.index[0] == 0 def test_between(data): result = filter_(parse("float_attr BETWEEN 4 AND 6"), data) assert len(result) == 1 and result.index[0] == 0 result = filter_(parse("int_attr NOT BETWEEN 4 AND 6"), data) assert len(result) == 1 and result.index[0] == 1 def test_like(data): result = filter_(parse("str_attr LIKE 'this is . test'"), data) assert len(result) == 1 and result.index[0] == 0 result = filter_(parse("str_attr LIKE 'this is % test'"), data) assert len(result) == 2 result = filter_(parse("str_attr NOT LIKE '% another test'"), data) assert len(result) == 1 and result.index[0] == 0 result = filter_(parse("str_attr NOT LIKE 'this is . test'"), data) assert len(result) == 1 and result.index[0] == 1 result = filter_(parse("str_attr ILIKE 'THIS IS . 
TEST'"), data) assert len(result) == 1 and result.index[0] == 0 result = filter_(parse("str_attr ILIKE 'THIS IS % TEST'"), data) assert len(result) == 2 def test_in(data): result = filter_(parse("int_attr IN ( 1, 2, 3, 4, 5 )"), data) assert len(result) == 1 and result.index[0] == 0 result = filter_(parse("int_attr NOT IN ( 1, 2, 3, 4, 5 )"), data) assert len(result) == 1 and result.index[0] == 1 def test_null(data): result = filter_(parse("maybe_str_attr IS NULL"), data) assert len(result) == 1 and result.index[0] == 0 result = filter_(parse("maybe_str_attr IS NOT NULL"), data) assert len(result) == 1 and result.index[0] == 1 # TODO: possible? # def test_has_attr(data): # result = filter_(parse('extra_attr EXISTS'), data) # assert len(result) == 1 and result[0] is data[0] # result = filter_(parse('extra_attr DOES-NOT-EXIST'), data) # assert len(result) == 1 and result[0] is data[1] # def test_temporal(data): # result = filter_( # parse('date_attr BEFORE 2010-01-08T00:00:00.00Z'), # data # ) # assert len(result) == 1 and result.index[0] == 0 # result = filter_( # parse('date_attr AFTER 2010-01-08T00:00:00.00+01:00'), # data # ) # assert len(result) == 1 and result.index[0] == 1 def test_spatial(data): result = filter_( parse("INTERSECTS(point_attr, ENVELOPE (0 1 0 1))"), data, ) assert len(result) == 1 and result.index[0] == 0 result = filter_( parse("EQUALS(point_attr, POINT(2 2))"), data, ) assert len(result) == 1 and result.index[0] == 1 def test_arithmetic(data): result = filter_( parse("int_attr = float_attr - 0.5"), data, ) assert len(result) == 2 result = filter_( parse("int_attr = 5 + 20 / 2 - 10"), data, ) assert len(result) == 1 and result.index[0] == 0 def test_function(data): result = filter_( parse("sin(float_attr) BETWEEN -0.75 AND -0.70"), data, ) assert len(result) == 1 and result.index[0] == 0 pygeofilter-0.3.1/tests/test_optimize.py000066400000000000000000000201421473475122500204700ustar00rootroot00000000000000# 
------------------------------------------------------------------------------ # # Project: pygeofilter # Authors: Fabian Schindler # # ------------------------------------------------------------------------------ # Copyright (C) 2021 EOX IT Services GmbH # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies of this Software or works derived from this Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. 
# ------------------------------------------------------------------------------ from pygeofilter import ast from pygeofilter.backends.optimize import optimize from pygeofilter.parsers.ecql import parse def test_not(): result = optimize(parse("NOT 1 > 2")) assert result == ast.Include(False) result = optimize(parse("NOT 1 < 2")) assert result == ast.Include(True) def test_combination(): # reduce right hand side result = optimize(parse("attr = 1 AND 1 < 2")) assert result == ast.Equal(ast.Attribute("attr"), 1) # reduce left hand side result = optimize(parse("1 < 2 AND attr = 1")) assert result == ast.Equal(ast.Attribute("attr"), 1) # reduce left hand side result = optimize(parse("1 < 2 AND attr = 1")) assert result == ast.Equal(ast.Attribute("attr"), 1) # can' reduce result = optimize(parse("attr = 1 AND other = 2")) assert result == ast.And( ast.Equal(ast.Attribute("attr"), 1), ast.Equal(ast.Attribute("other"), 2) ) # reduce AND to an INCLUDE if both sides evaluate to true result = optimize(parse("1 = 1 AND 2 = 2")) assert result == ast.Include(False) # reduce AND to an EXCLUDE if either side evaluates to false result = optimize(parse("attr = 1 AND 2 = 3")) assert result == ast.Include(True) result = optimize(parse("2 = 3 AND attr = 1")) assert result == ast.Include(True) result = optimize(parse("0 = 1 AND 2 = 3")) assert result == ast.Include(True) # reduce OR to INCLUDE if either side evaluates to true result = optimize(parse("attr = 1 OR 2 = 2")) assert result == ast.Include(False) result = optimize(parse("2 = 2 OR attr = 1")) assert result == ast.Include(False) # reduce OR to an EXCLUDE if both sides evaluate to false result = optimize(parse("1 = 2 AND 2 = 1")) assert result == ast.Include(True) def test_comparison(): # reduce less than result = optimize(parse("1 < 2 AND attr = 1")) assert result == ast.Equal(ast.Attribute("attr"), 1) # reduce greater than result = optimize(parse("2 > 1 AND attr = 1")) assert result == ast.Equal(ast.Attribute("attr"), 1) # 
reduce less or equal result = optimize(parse("1 <= 2 AND attr = 1")) assert result == ast.Equal(ast.Attribute("attr"), 1) # reduce greater or equal result = optimize(parse("2 >= 1 AND attr = 1")) assert result == ast.Equal(ast.Attribute("attr"), 1) # reduce not equal result = optimize(parse("2 <> 1 AND attr = 1")) assert result == ast.Equal(ast.Attribute("attr"), 1) def test_between(): # allow reduction result = optimize(parse("5 BETWEEN 1 AND 6 AND attr = 1")) assert result == ast.Equal(ast.Attribute("attr"), 1) result = optimize(parse("10 NOT BETWEEN 1 AND 6 AND attr = 1")) assert result == ast.Equal(ast.Attribute("attr"), 1) # don't reduce if either lhs, low or high are uncertain result = optimize(parse("attr BETWEEN 1 AND 6")) assert result == ast.Between(ast.Attribute("attr"), 1, 6, False) result = optimize(parse("5 BETWEEN attr AND 6")) assert result == ast.Between(5, ast.Attribute("attr"), 6, False) result = optimize(parse("5 BETWEEN 1 AND attr")) assert result == ast.Between(5, 1, ast.Attribute("attr"), False) def test_like(): # allow reduction result = optimize(parse("'This is a test' LIKE 'This is %' AND attr = 1")) assert result == ast.Equal(ast.Attribute("attr"), 1) result = optimize(parse("'This is a test' LIKE 'This is . 
test' AND attr = 1")) assert result == ast.Equal(ast.Attribute("attr"), 1) # don't reduction when an attribute is referenced result = optimize(parse("attr LIKE 'This is %'")) assert result == ast.Like( ast.Attribute("attr"), "This is %", False, "%", ".", "\\", False ) def test_in(): # allow reduction when the left hand side and all options # are certain result = optimize(parse("1 IN (1, 2, 3) AND attr = 1")) assert result == ast.Equal(ast.Attribute("attr"), 1) result = optimize(parse("5 NOT IN (1, 2, 3) AND attr = 1")) assert result == ast.Equal(ast.Attribute("attr"), 1) # don't allow reduction if either left hand side or either option # is uncertain result = optimize(parse("attr IN (1, 2, 3)")) assert result == ast.In(ast.Attribute("attr"), [1, 2, 3], False) result = optimize(parse("1 IN (attr, 2, 3)")) assert result == ast.In(1, [ast.Attribute("attr"), 2, 3], False) def test_temporal(): # TODO pass def test_array(): # TODO pass def test_spatial(): # TODO pass def test_arithmetic(): # test possible optimizations result = optimize(parse("attr = 10 + 10")) assert result == ast.Equal(ast.Attribute("attr"), 20) result = optimize(parse("attr = 30 - 10")) assert result == ast.Equal(ast.Attribute("attr"), 20) result = optimize(parse("attr = 10 * 2")) assert result == ast.Equal(ast.Attribute("attr"), 20) result = optimize(parse("attr = 40 / 2")) assert result == ast.Equal(ast.Attribute("attr"), 20) # test imppossible optimizations result = optimize(parse("attr = other + 10")) assert result == ast.Equal( ast.Attribute("attr"), ast.Add(ast.Attribute("other"), 10), ) result = optimize(parse("attr = other - 10")) assert result == ast.Equal( ast.Attribute("attr"), ast.Sub(ast.Attribute("other"), 10), ) result = optimize(parse("attr = other * 2")) assert result == ast.Equal( ast.Attribute("attr"), ast.Mul(ast.Attribute("other"), 2), ) result = optimize(parse("attr = other / 2")) assert result == ast.Equal( ast.Attribute("attr"), ast.Div(ast.Attribute("other"), 2), ) def 
test_function(): def myadder(a, b): return a + b result = optimize(parse("attr = myadder(1, 2)"), {"myadder": myadder}) assert result == ast.Equal( ast.Attribute("attr"), 3, ) # can't optimize a function referencing an attribute result = optimize(parse("attr = myadder(other, 2)"), {"myadder": myadder}) assert result == ast.Equal( ast.Attribute("attr"), ast.Function("myadder", [ast.Attribute("other"), 2]) ) # can't optimize a function with a nested reference to an attribute result = optimize(parse("attr = myadder(other + 2, 2)"), {"myadder": myadder}) assert result == ast.Equal( ast.Attribute("attr"), ast.Function("myadder", [ast.Add(ast.Attribute("other"), 2), 2]), ) # can't optimize an unknown functions result = optimize(parse("attr = unkown(1, 2)"), {"myadder": myadder}) assert result == ast.Equal( ast.Attribute("attr"), ast.Function( "unkown", [ 1, 2, ], ), ) pygeofilter-0.3.1/tests/test_sql/000077500000000000000000000000001473475122500170565ustar00rootroot00000000000000pygeofilter-0.3.1/tests/test_sql/__init__.py000066400000000000000000000000001473475122500211550ustar00rootroot00000000000000pygeofilter-0.3.1/tests/test_sql/test_evaluate.py000066400000000000000000000207261473475122500223040ustar00rootroot00000000000000# ------------------------------------------------------------------------------ # # Project: pygeofilter # Authors: Fabian Schindler # # ------------------------------------------------------------------------------ # Copyright (C) 2021 EOX IT Services GmbH # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission 
notice shall be included in # all copies of this Software or works derived from this Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. # ------------------------------------------------------------------------------ import pytest from osgeo import ogr from pygeofilter.backends.sql import to_sql_where from pygeofilter.parsers.ecql import parse ogr.UseExceptions() @pytest.fixture def data(): driver = ogr.GetDriverByName("MEMORY") source = driver.CreateDataSource("data") layer = source.CreateLayer("layer") id_attr = ogr.FieldDefn("id", ogr.OFTInteger) layer.CreateField(id_attr) str_attr = ogr.FieldDefn("str_attr", ogr.OFTString) layer.CreateField(str_attr) maybe_str_attr = ogr.FieldDefn("maybe_str_attr", ogr.OFTString) layer.CreateField(maybe_str_attr) int_attr = ogr.FieldDefn("int_attr", ogr.OFTInteger) layer.CreateField(int_attr) float_attr = ogr.FieldDefn("float_attr", ogr.OFTReal) layer.CreateField(float_attr) date_attr = ogr.FieldDefn("date_attr", ogr.OFTDate) layer.CreateField(date_attr) datetime_attr = ogr.FieldDefn("datetime_attr", ogr.OFTDateTime) layer.CreateField(datetime_attr) feature_def = layer.GetLayerDefn() feature = ogr.Feature(feature_def) feature.SetGeometry(ogr.CreateGeometryFromWkt("POINT (1 1)")) feature.SetField("id", 0) feature.SetField("str_attr", "this is a test") feature.SetField("maybe_str_attr", None) feature.SetField("int_attr", 5) feature.SetField("float_attr", 5.5) feature.SetField("date_attr", "2010-01-01") feature.SetField("datetime_attr", "2010-01-01T00:00:00Z") layer.CreateFeature(feature) feature = None 
feature_def = layer.GetLayerDefn() feature = ogr.Feature(feature_def) feature.SetGeometry(ogr.CreateGeometryFromWkt("POINT (2 2)")) feature.SetField("id", 1) feature.SetField("str_attr", "this is another test") feature.SetField("maybe_str_attr", "not null") feature.SetField("int_attr", 8) feature.SetField("float_attr", 8.5) feature.SetField("date_attr", "2010-01-10") feature.SetField("datetime_attr", "2010-10-01T00:00:00Z") layer.CreateFeature(feature) feature = None return source FIELD_MAPPING = { "str_attr": "str_attr", "maybe_str_attr": "maybe_str_attr", "int_attr": "int_attr", "float_attr": "float_attr", "date_attr": "date_attr", "datetime_attr": "datetime_attr", "point_attr": "GEOMETRY", } FUNCTION_MAP = {"sin": "sin"} def filter_(ast, data): where = to_sql_where(ast, FIELD_MAPPING, FUNCTION_MAP) return data.ExecuteSQL( f""" SELECT id, str_attr, maybe_str_attr, int_attr, float_attr, date_attr, datetime_attr, GEOMETRY FROM layer WHERE {where} """, None, "SQLite", ) def test_comparison(data): result = filter_(parse("int_attr = 5"), data) assert result.GetFeatureCount() == 1 and result.GetFeature(0).GetField(0) == 0 result = filter_(parse("int_attr < 6"), data) assert result.GetFeatureCount() == 1 and result.GetFeature(0).GetField(0) == 0 result = filter_(parse("int_attr > 6"), data) assert result.GetFeatureCount() == 1 and result.GetFeature(0).GetField(0) == 1 result = filter_(parse("int_attr <= 5"), data) assert result.GetFeatureCount() == 1 and result.GetFeature(0).GetField(0) == 0 result = filter_(parse("int_attr >= 8"), data) assert result.GetFeatureCount() == 1 and result.GetFeature(0).GetField(0) == 1 result = filter_(parse("int_attr <> 5"), data) assert result.GetFeatureCount() == 1 and result.GetFeature(0).GetField(0) == 1 def test_combination(data): result = filter_(parse("int_attr = 5 AND float_attr < 6.0"), data) assert result.GetFeatureCount() == 1 and result.GetFeature(0).GetField(0) == 0 result = filter_(parse("int_attr = 5 AND float_attr < 6.0"), 
data) assert result.GetFeatureCount() == 1 and result.GetFeature(0).GetField(0) == 0 def test_between(data): result = filter_(parse("float_attr BETWEEN 4 AND 6"), data) assert result.GetFeatureCount() == 1 and result.GetFeature(0).GetField(0) == 0 result = filter_(parse("int_attr NOT BETWEEN 4 AND 6"), data) assert result.GetFeatureCount() == 1 and result.GetFeature(0).GetField(0) == 1 def test_like(data): result = filter_(parse("str_attr LIKE 'this is . test'"), data) assert result.GetFeatureCount() == 1 and result.GetFeature(0).GetField(0) == 0 result = filter_(parse("str_attr LIKE 'this is % test'"), data) assert result.GetFeatureCount() == 2 result = filter_(parse("str_attr NOT LIKE '% another test'"), data) assert result.GetFeatureCount() == 1 and result.GetFeature(0).GetField(0) == 0 result = filter_(parse("str_attr NOT LIKE 'this is . test'"), data) assert result.GetFeatureCount() == 1 and result.GetFeature(0).GetField(0) == 1 result = filter_(parse("str_attr ILIKE 'THIS IS . TEST'"), data) assert result.GetFeatureCount() == 1 and result.GetFeature(0).GetField(0) == 0 result = filter_(parse("str_attr ILIKE 'THIS IS % TEST'"), data) assert result.GetFeatureCount() == 2 def test_in(data): result = filter_(parse("int_attr IN ( 1, 2, 3, 4, 5 )"), data) assert result.GetFeatureCount() == 1 and result.GetFeature(0).GetField(0) == 0 result = filter_(parse("int_attr NOT IN ( 1, 2, 3, 4, 5 )"), data) assert result.GetFeatureCount() == 1 and result.GetFeature(0).GetField(0) == 1 def test_null(data): result = filter_(parse("maybe_str_attr IS NULL"), data) assert result.GetFeatureCount() == 1 and result.GetFeature(0).GetField(0) == 0 result = filter_(parse("maybe_str_attr IS NOT NULL"), data) assert result.GetFeatureCount() == 1 and result.GetFeature(0).GetField(0) == 1 # TODO: possible? 
# def test_has_attr(data): # result = filter_(parse('extra_attr EXISTS'), data) # assert len(result) == 1 and result[0] is data[0] # result = filter_(parse('extra_attr DOES-NOT-EXIST'), data) # assert len(result) == 1 and result[0] is data[1] # def test_temporal(data): # result = filter_( # parse('date_attr BEFORE 2010-01-08T00:00:00.00Z'), # data # ) # assert len(result) == 1 and result.index[0] == 0 # result = filter_( # parse('date_attr AFTER 2010-01-08T00:00:00.00+01:00'), # data # ) # assert len(result) == 1 and result.index[0] == 1 def test_spatial(data): result = filter_( parse("INTERSECTS(point_attr, ENVELOPE (0 1 0 1))"), data, ) assert result.GetFeatureCount() == 1 and result.GetFeature(0).GetField(0) == 0 result = filter_( parse("EQUALS(point_attr, POINT(2 2))"), data, ) assert result.GetFeatureCount() == 1 and result.GetFeature(0).GetField(0) == 1 def test_arithmetic(data): result = filter_( parse("int_attr = float_attr - 0.5"), data, ) assert result.GetFeatureCount() == 2 result = filter_( parse("int_attr = 5 + 20 / 2 - 10"), data, ) assert result.GetFeatureCount() == 1 and result.GetFeature(0).GetField(0) == 0 def test_function(data): result = filter_( parse("sin(float_attr) BETWEEN -0.75 AND -0.70"), data, ) assert result.GetFeatureCount() == 1 and result.GetFeature(0).GetField(0) == 0 pygeofilter-0.3.1/tests/test_utils.py000066400000000000000000000062061473475122500177750ustar00rootroot00000000000000# ------------------------------------------------------------------------------ # # Project: pygeofilter # Authors: Fabian Schindler # # ------------------------------------------------------------------------------ # Copyright (C) 2021 EOX IT Services GmbH # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, 
sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies of this Software or works derived from this Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. # ------------------------------------------------------------------------------ from pygeofilter.util import like_pattern_to_re SEARCH_STRING = "This is a test" def test_basic_single(): pattern = r"This is . test" regex = like_pattern_to_re( pattern, nocase=False, wildcard="%", single_char=".", escape_char="\\", ) assert regex.match(SEARCH_STRING) is not None def test_basic(): pattern = r"% a test" regex = like_pattern_to_re( pattern, nocase=False, wildcard="%", single_char=".", escape_char="\\", ) assert regex.match(SEARCH_STRING) is not None def test_basic_nocase(): pattern = r"% A TEST" regex = like_pattern_to_re( pattern, nocase=True, wildcard="%", single_char=".", escape_char="\\", ) assert regex.match(SEARCH_STRING) is not None def test_basic_regex_escape_re_func(): pattern = r".* a test" regex = like_pattern_to_re( pattern, nocase=True, wildcard="%", single_char=".", escape_char="\\", ) assert regex.match(SEARCH_STRING) is None def test_basic_regex_escape_char(): search_string = r"This is a % sign" pattern = r"This is a /% sign" regex = like_pattern_to_re( pattern, nocase=True, wildcard="%", single_char=".", escape_char="/", ) assert regex.match(search_string) is not None def 
test_basic_regex_escape_char_2(): search_string = r"This is a . sign" pattern = r"This is a /. sign" regex = like_pattern_to_re( pattern, nocase=True, wildcard="%", single_char=".", escape_char="/", ) assert regex.match(search_string) is not None