pax_global_header00006660000000000000000000000064143623426040014516gustar00rootroot0000000000000052 comment=f3a98ceac23d76140657e3645acb66f9dfa04e14 polyline-2.0.0/000077500000000000000000000000001436234260400133505ustar00rootroot00000000000000polyline-2.0.0/.gitattributes000066400000000000000000000000141436234260400162360ustar00rootroot00000000000000* text=auto polyline-2.0.0/.github/000077500000000000000000000000001436234260400147105ustar00rootroot00000000000000polyline-2.0.0/.github/workflows/000077500000000000000000000000001436234260400167455ustar00rootroot00000000000000polyline-2.0.0/.github/workflows/lint-test-docs.yml000066400000000000000000000014501436234260400223410ustar00rootroot00000000000000name: lint-test-build on: [push] jobs: test: runs-on: ubuntu-latest strategy: matrix: python-version: ["3.7", "3.8", "3.9", "3.10", "3.11","pypy3.7", "pypy3.8", "pypy3.9"] steps: - uses: actions/checkout@v3 - name: Set up Python ${{ matrix.python-version }} uses: actions/setup-python@v4 with: python-version: ${{ matrix.python-version }} - name: Install dependencies run: | python -m pip install --upgrade pip python -m pip install -e .[dev] - name: Lint with pylint run: | python -m pylint polyline - name: Test with pytest run: | python -m pytest - name: Build docs run: | python -m sphinx.cmd.build -b html docs docs/_build/html polyline-2.0.0/.gitignore000066400000000000000000000006101436234260400153350ustar00rootroot00000000000000*.egg *.egg-info/ *.log *.manifest *.mo *.pot *.py[cod] *.so *.spec *.un~ *~ .cache .coverage .directory .installed.cfg .netrwhist .Python .tox/ build/ coverage.xml develop-eggs/ dist/ docs/_build/ downloads/ eggs/ env/ htmlcov/ lib/ lib64/ MANIFEST nosetests.xml parts/ pip-delete-this-directory.txt pip-log.txt sdist/ Session.vim target/ var/ [._]*.s[a-w][a-z] [._]s[a-w][a-z] __pycache__/ polyline-2.0.0/.pylintrc000066400000000000000000000476521436234260400152330ustar00rootroot00000000000000[MAIN] # Analyse import fallback blocks. This can be used to support both Python 2 and # 3 compatible code, which means that the block might have code that exists # only in one or another interpreter, leading to false positives when analysed. analyse-fallback-blocks=no # Load and enable all available extensions. Use --list-extensions to see a list # all available extensions. #enable-all-extensions= # In error mode, messages with a category besides ERROR or FATAL are # suppressed, and no reports are done by default. Error mode is compatible with # disabling specific errors. #errors-only= # Always return a 0 (non-error) status code, even if lint errors are found. # This is primarily useful in continuous integration scripts. #exit-zero= # A comma-separated list of package or module names from where C extensions may # be loaded. Extensions are loading into the active Python interpreter and may # run arbitrary code. extension-pkg-allow-list= # A comma-separated list of package or module names from where C extensions may # be loaded. Extensions are loading into the active Python interpreter and may # run arbitrary code. (This is an alternative name to extension-pkg-allow-list # for backward compatibility.) extension-pkg-whitelist= # Return non-zero exit code if any of these messages/categories are detected, # even if score is above --fail-under value. Syntax same as enable. Messages # specified are enabled, while categories only check already-enabled messages. fail-on= # Specify a score threshold to be exceeded before program exits with error. 
fail-under=10 # Interpret the stdin as a python script, whose filename needs to be passed as # the module_or_package argument. #from-stdin= # Files or directories to be skipped. They should be base names, not paths. ignore=CVS # Add files or directories matching the regex patterns to the ignore-list. The # regex matches against paths and can be in Posix or Windows format. ignore-paths= # Files or directories matching the regex patterns are skipped. The regex # matches against base names, not paths. The default value ignores Emacs file # locks ignore-patterns=^\.# # List of module names for which member attributes should not be checked # (useful for modules/projects where namespaces are manipulated during runtime # and thus existing member attributes cannot be deduced by static analysis). It # supports qualified module names, as well as Unix pattern matching. ignored-modules= # Python code to execute, usually for sys.path manipulation such as # pygtk.require(). #init-hook= # Use multiple processes to speed up Pylint. Specifying 0 will auto-detect the # number of processors available to use. jobs=1 # Control the amount of potential inferred values when inferring a single # object. This can help the performance when dealing with large functions or # complex, nested conditions. limit-inference-results=100 # List of plugins (as comma separated values of python module names) to load, # usually to register additional checkers. load-plugins= # Pickle collected data for later comparisons. persistent=yes # Minimum Python version to use for version dependent checks. Will default to # the version used to run pylint. py-version=3.10 # Discover python modules and packages in the file system subtree. recursive=no # When enabled, pylint would attempt to guess common misconfiguration and emit # user-friendly hints instead of false-positive error messages. suggestion-mode=yes # Allow loading of arbitrary C extensions. Extensions are imported into the # active Python interpreter and may run arbitrary code. unsafe-load-any-extension=no # In verbose mode, extra non-checker-related info will be displayed. #verbose= [REPORTS] # Python expression which should return a score less than or equal to 10. You # have access to the variables 'fatal', 'error', 'warning', 'refactor', # 'convention', and 'info' which contain the number of messages in each # category, as well as 'statement' which is the total number of statements # analyzed. This score is used by the global evaluation report (RP0004). evaluation=max(0, 0 if fatal else 10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10)) # Template used to display messages. This is a python new-style format string # used to format the message information. See doc for all details. msg-template= # Set the output format. Available formats are text, parseable, colorized, json # and msvs (visual studio). You can also give a reporter class, e.g. # mypackage.mymodule.MyReporterClass. #output-format= # Tells whether to display a full report or only the messages. reports=no # Activate the evaluation score. score=yes [MESSAGES CONTROL] # Only show warnings with the listed confidence levels. Leave empty to show # all. Valid levels: HIGH, CONTROL_FLOW, INFERENCE, INFERENCE_FAILURE, # UNDEFINED. confidence=HIGH, CONTROL_FLOW, INFERENCE, INFERENCE_FAILURE, UNDEFINED # Disable the message, report, category or checker with the given id(s). 
You # can either give multiple identifiers separated by comma (,) or put this # option multiple times (only on the command line, not in the configuration # file where it should appear only once). You can also use "--disable=all" to # disable everything first and then re-enable specific checks. For example, if # you want to run only the similarities checker, you can use "--disable=all # --enable=similarities". If you want to run only the classes checker, but have # no Warning level messages displayed, use "--disable=all --enable=classes # --disable=W". disable=raw-checker-failed, bad-inline-option, locally-disabled, file-ignored, suppressed-message, useless-suppression, deprecated-pragma, use-symbolic-message-instead, unnecessary-lambda-assignment, redundant-unittest-assert, duplicate-code, logging-fstring-interpolation # Enable the message, report, category or checker with the given id(s). You can # either give multiple identifier separated by comma (,) or put this option # multiple time (only on the command line, not in the configuration file where # it should appear only once). See also the "--disable" option for examples. enable=c-extension-no-member [BASIC] # Naming style matching correct argument names. argument-naming-style=snake_case # Regular expression matching correct argument names. Overrides argument- # naming-style. If left empty, argument names will be checked with the set # naming style. argument-rgx=^(_?)[a-z][a-z0-9]*(_[a-z0-9]+)*$ # Naming style matching correct attribute names. attr-naming-style=snake_case # Regular expression matching correct attribute names. Overrides attr-naming- # style. If left empty, attribute names will be checked with the set naming # style. #attr-rgx= # Bad variable names which should always be refused, separated by a comma. bad-names=foo, bar, baz, toto, tutu, tata # Bad variable names regexes, separated by a comma. If names match any regex, # they will always be refused bad-names-rgxs= # Naming style matching correct class attribute names. class-attribute-naming-style=any # Regular expression matching correct class attribute names. Overrides class- # attribute-naming-style. If left empty, class attribute names will be checked # with the set naming style. #class-attribute-rgx= # Naming style matching correct class constant names. class-const-naming-style=UPPER_CASE # Regular expression matching correct class constant names. Overrides class- # const-naming-style. If left empty, class constant names will be checked with # the set naming style. #class-const-rgx= # Naming style matching correct class names. class-naming-style=PascalCase # Regular expression matching correct class names. Overrides class-naming- # style. If left empty, class names will be checked with the set naming style. class-rgx=^[a-zA-Z][a-z0-9]*(_[a-z0-9]+)*$ # Naming style matching correct constant names. const-naming-style=UPPER_CASE # Regular expression matching correct constant names. Overrides const-naming- # style. If left empty, constant names will be checked with the set naming # style. const-rgx=^(_?)[a-zA-Z][a-zA-Z0-9]*(_[a-zA-Z0-9]+)*$ # Minimum line length for functions/classes that require docstrings, shorter # ones are exempt. docstring-min-length=-1 # Naming style matching correct function names. function-naming-style=snake_case # Regular expression matching correct function names. Overrides function- # naming-style. If left empty, function names will be checked with the set # naming style. 
#function-rgx= # Good variable names which should always be accepted, separated by a comma. good-names=i, j, k, ex, Run, _ # Good variable names regexes, separated by a comma. If names match any regex, # they will always be accepted good-names-rgxs= # Include a hint for the correct naming format with invalid-name. include-naming-hint=no # Naming style matching correct inline iteration names. inlinevar-naming-style=any # Regular expression matching correct inline iteration names. Overrides # inlinevar-naming-style. If left empty, inline iteration names will be checked # with the set naming style. #inlinevar-rgx= # Naming style matching correct method names. method-naming-style=snake_case # Regular expression matching correct method names. Overrides method-naming- # style. If left empty, method names will be checked with the set naming style. #method-rgx= # Naming style matching correct module names. module-naming-style=snake_case # Regular expression matching correct module names. Overrides module-naming- # style. If left empty, module names will be checked with the set naming style. #module-rgx= # Colon-delimited sets of names that determine each other's naming style when # the name regexes allow several styles. name-group= # Regular expression which should only match function or class names that do # not require a docstring. no-docstring-rgx=^_ # List of decorators that produce properties, such as abc.abstractproperty. Add # to this list to register other decorators that produce valid properties. # These decorators are taken in consideration only for invalid-name. property-classes=abc.abstractproperty # Regular expression matching correct type variable names. If left empty, type # variable names will be checked with the set naming style. #typevar-rgx= # Naming style matching correct variable names. variable-naming-style=snake_case # Regular expression matching correct variable names. Overrides variable- # naming-style. If left empty, variable names will be checked with the set # naming style. variable-rgx=^(_?)[a-z][a-z0-9]*(_[a-z0-9]+)*(_?)(_?)$ [CLASSES] # Warn about protected attribute access inside special methods check-protected-access-in-special-methods=no # List of method names used to declare (i.e. assign) instance attributes. defining-attr-methods=__init__, __new__, setUp, __post_init__ # List of member names, which should be excluded from the protected access # warning. exclude-protected=_asdict, _fields, _replace, _source, _make # List of valid names for the first argument in a class method. valid-classmethod-first-arg=cls # List of valid names for the first argument in a metaclass class method. valid-metaclass-classmethod-first-arg=cls [DESIGN] # List of regular expressions of class ancestor names to ignore when counting # public methods (see R0903) exclude-too-few-public-methods= # List of qualified class names to ignore when counting class parents (see # R0901) ignored-parents= # Maximum number of arguments for function / method. max-args=12 # Maximum number of attributes for a class (see R0902). max-attributes=7 # Maximum number of boolean expressions in an if statement (see R0916). max-bool-expr=5 # Maximum number of branch for function / method body. max-branches=40 # Maximum number of locals for function / method body. max-locals=30 # Maximum number of parents for a class (see R0901). max-parents=7 # Maximum number of public methods for a class (see R0904). max-public-methods=40 # Maximum number of return / yield for function / method body. 
max-returns=40 # Maximum number of statements in function / method body. max-statements=100 # Minimum number of public methods for a class (see R0903). min-public-methods=0 [EXCEPTIONS] # Exceptions that will emit a warning when caught. overgeneral-exceptions=BaseException, Exception [FORMAT] # Expected format of line ending, e.g. empty (any line ending), LF or CRLF. expected-line-ending-format= # Regexp for a line that is allowed to be longer than the limit. ignore-long-lines=^\s*(# )??$ # Number of spaces of indent required inside a hanging or continued line. indent-after-paren=4 # String used as indentation unit. This is usually " " (4 spaces) or "\t" (1 # tab). indent-string=' ' # Maximum number of characters on a single line. max-line-length=120 # Maximum number of lines in a module. max-module-lines=4000 # Allow the body of a class to be on the same line as the declaration if body # contains single statement. single-line-class-stmt=no # Allow the body of an if to be on the same line as the test if there is no # else. single-line-if-stmt=no [IMPORTS] # List of modules that can be imported at any level, not just the top level # one. allow-any-import-level= # Allow wildcard imports from modules that define __all__. allow-wildcard-with-all=no # Deprecated modules which should not be used, separated by a comma. deprecated-modules= # Output a graph (.gv or any supported image format) of external dependencies # to the given file (report RP0402 must not be disabled). ext-import-graph= # Output a graph (.gv or any supported image format) of all (i.e. internal and # external) dependencies to the given file (report RP0402 must not be # disabled). import-graph= # Output a graph (.gv or any supported image format) of internal dependencies # to the given file (report RP0402 must not be disabled). int-import-graph= # Force import order to recognize a module as part of the standard # compatibility libraries. known-standard-library= # Force import order to recognize a module as part of a third party library. known-third-party=enchant # Couples of modules and preferred modules, separated by a comma. preferred-modules= [LOGGING] # The type of string formatting that logging methods do. `old` means using % # formatting, `new` is for `{}` formatting. logging-format-style=old # Logging modules to check that the string format arguments are in logging # function parameter format. logging-modules=logging [MISCELLANEOUS] # List of note tags to take in consideration, separated by a comma. notes=FIXME, XXX, TODO # Regular expression of note tags to take in consideration. notes-rgx= [REFACTORING] # Maximum number of nested blocks for function / method body max-nested-blocks=5 # Complete name of functions that never returns. When checking for # inconsistent-return-statements if a never returning function is called then # it will be considered as an explicit return statement and no message will be # printed. never-returning-functions=sys.exit,argparse.parse_error [SIMILARITIES] # Comments are removed from the similarity computation ignore-comments=yes # Docstrings are removed from the similarity computation ignore-docstrings=yes # Imports are removed from the similarity computation ignore-imports=yes # Signatures are removed from the similarity computation ignore-signatures=yes # Minimum lines number of a similarity. min-similarity-lines=4 [SPELLING] # Limits count of emitted suggestions for spelling mistakes. max-spelling-suggestions=4 # Spelling dictionary name. Available dictionaries: none. 
To make it work, # install the 'python-enchant' package. spelling-dict= # List of comma separated words that should be considered directives if they # appear at the beginning of a comment and should not be checked. spelling-ignore-comment-directives=fmt: on,fmt: off,noqa:,noqa,nosec,isort:skip,mypy: # List of comma separated words that should not be checked. spelling-ignore-words= # A path to a file that contains the private dictionary; one word per line. spelling-private-dict-file= # Tells whether to store unknown words to the private dictionary (see the # --spelling-private-dict-file option) instead of raising a message. spelling-store-unknown-words=no [STRING] # This flag controls whether inconsistent-quotes generates a warning when the # character used as a quote delimiter is used inconsistently within a module. check-quote-consistency=no # This flag controls whether the implicit-str-concat should generate a warning # on implicit string concatenation in sequences defined over several lines. check-str-concat-over-line-jumps=no [TYPECHECK] # List of decorators that produce context managers, such as # contextlib.contextmanager. Add to this list to register other decorators that # produce valid context managers. contextmanager-decorators=contextlib.contextmanager # List of members which are set dynamically and missed by pylint inference # system, and so shouldn't trigger E1101 when accessed. Python regular # expressions are accepted. generated-members= # Tells whether to warn about missing members when the owner of the attribute # is inferred to be None. ignore-none=yes # This flag controls whether pylint should warn about no-member and similar # checks whenever an opaque object is returned when inferring. The inference # can return multiple potential results while evaluating a Python object, but # some branches might not be evaluated, which results in partial inference. In # that case, it might be useful to still emit no-member and other checks for # the rest of the inferred objects. ignore-on-opaque-inference=yes # List of symbolic message names to ignore for Mixin members. ignored-checks-for-mixins=no-member, not-async-context-manager, not-context-manager, attribute-defined-outside-init # List of class names for which member attributes should not be checked (useful # for classes with dynamically set attributes). This supports the use of # qualified names. ignored-classes=optparse.Values,thread._local,_thread._local,argparse.Namespace # Show a hint with possible names when a member name was not found. The aspect # of finding the hint is based on edit distance. missing-member-hint=yes # The minimum edit distance a name should have in order to be considered a # similar match for a missing member name. missing-member-hint-distance=1 # The total number of similar names that should be taken in consideration when # showing a hint for a missing member. missing-member-max-choices=1 # Regex pattern to define which classes are considered mixins. mixin-class-rgx=.*[Mm]ixin # List of decorators that change the signature of a decorated function. signature-mutators= [VARIABLES] # List of additional names supposed to be defined in builtins. Remember that # you should avoid defining new builtins when possible. additional-builtins= # Tells whether unused global variables should be treated as a violation. allow-global-unused-variables=yes # List of names allowed to shadow builtins allowed-redefined-builtins= # List of strings which can identify a callback function by name. 
A callback # name must start or end with one of those strings. callbacks=cb_, _cb # A regular expression matching the name of dummy variables (i.e. expected to # not be used). dummy-variables-rgx=_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_ # Argument names that match this expression will be ignored. Default to name # with leading underscore. ignored-argument-names=_.*|^ignored_|^unused_ # Tells whether we should check for unused import in __init__ files. init-import=no # List of qualified module names which can have objects that can redefine # builtins. redefining-builtins-modules=six.moves,past.builtins,future.builtins,builtins,io polyline-2.0.0/.readthedocs.yaml000066400000000000000000000002611436234260400165760ustar00rootroot00000000000000version: 2 sphinx: configuration: docs/conf.py formats: - pdf python: version: "3.8" install: - method: pip path: . extra_requirements: - dev polyline-2.0.0/LICENSE000066400000000000000000000021121436234260400143510ustar00rootroot00000000000000Copyright (c) 2023 Frederick Jansen Copyright (c) 2014 Bruno M. Custódio Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. polyline-2.0.0/README.rst000066400000000000000000000103441436234260400150410ustar00rootroot00000000000000polyline ======== .. image:: http://img.shields.io/pypi/v/polyline.svg?style=flat :target: https://pypi.python.org/pypi/polyline/ .. image:: https://readthedocs.org/projects/polyline/badge/?version=latest :target: https://polyline.readthedocs.io/en/latest/?badge=latest :alt: Documentation Status .. image:: https://github.com/frederickjansen/polyline/actions/workflows/lint-test-docs.yml/badge.svg :target: https://github.com/frederickjansen/polyline/actions/workflows/lint-test-docs.yml :alt: Build ``polyline`` is a Python implementation of Google's `Encoded Polyline Algorithm Format `__. It is essentially a port of `Mapbox polyline `__ with some additional features. Installation ------------ ``polyline`` can be installed using ``pip``:: $ pip install polyline Starting from ``v2.0.0`` only Python 3.7 and above is supported. For Python 2 support, please install ``v1.4.0``:: $ pip install polyline==1.4.0 API Documentation ----------------- Encoding ^^^^^^^^ To get the encoded polyline representation of a given set of (lat, lon) coordinates:: import polyline polyline.encode([(38.5, -120.2), (40.7, -120.9), (43.2, -126.4)], 5) This should return ``_p~iF~ps|U_ulL~ugC_hgN~eq`@``. You can set the required precision with the optional ``precision`` parameter. The default value is 5. 
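For instance, encoding a route at a precision of 6 (the coordinates and expected output below are taken from this project's own test suite)::

    polyline.encode([(38.500, -120.200), (40.700, -120.950), (43.252, -126.453)], 6)

This should return ``_izlhA~rlgdF_{geC~ywl@_kwzCn`{nI``.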
You can encode (lon, lat) tuples by setting ``geojson=True``. Decoding ^^^^^^^^ To get a set of coordinates represented by a given encoded polyline string:: import polyline polyline.decode('u{~vFvyys@fS]', 5) This should return ``[(40.63179, -8.65708), (40.62855, -8.65693)]`` in (lat, lon) order. You can set the required precision with the optional ``precision`` parameter. The default value is 5. You can decode into (lon, lat) tuples by setting ``geojson=True``. Development ----------- All installation and development dependencies are fully specified in ``pyproject.toml``. The ``project.optional-dependencies`` object is used to `specify optional requirements `__ for various development tasks. This makes it possible to specify additional options when performing installation using ``pip``:: python -m pip install .[dev] Documentation ^^^^^^^^^^^^^ The documentation can be generated automatically from the source files using `Sphinx `__:: python -m sphinx.cmd.build -b html docs docs/_build/html Testing and Conventions ^^^^^^^^^^^^^^^^^^^^^^^ All unit tests are executed and their coverage is measured when using `pytest `__:: python -m pytest Style conventions are enforced using `Pylint `__:: python -m pylint polyline Contributions ^^^^^^^^^^^^^ In order to contribute to the source code, open an issue or submit a pull request on the `GitHub page `__ for this library. Versioning ^^^^^^^^^^ Beginning with version 0.1.0, the version number format for this library and the changes to the library associated with version number increments conform with `Semantic Versioning 2.0.0 `__. Publishing ^^^^^^^^^^ This library can be published as a `package on PyPI `__ by a package maintainer. Ensure that the correct version number appears in ``pyproject.toml``, and that any links in this README document to the Read the Docs documentation of this package (or its dependencies) have appropriate version numbers. Also ensure that the Read the Docs project for this library has an `automation rule `__ that activates and sets as the default all tagged versions. Create and push a tag for this version (replacing ``?.?.?`` with the version number):: git tag ?.?.? git push origin ?.?.? Remove any old build/distribution files. Then, package the source into a distribution archive:: rm -rf build dist src/*.egg-info python -m build --sdist --wheel . Finally, upload the package distribution archive to `PyPI `__:: python -m twine upload dist/* polyline-2.0.0/docs/000077500000000000000000000000001436234260400143005ustar00rootroot00000000000000polyline-2.0.0/docs/Makefile000066400000000000000000000011721436234260400157410ustar00rootroot00000000000000# Minimal makefile for Sphinx documentation # # You can set these variables from the command line, and also # from the environment for the first two. SPHINXOPTS ?= SPHINXBUILD ?= sphinx-build SOURCEDIR = . BUILDDIR = _build # Put it first so that "make" without argument is like "make help". help: @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) .PHONY: help Makefile # Catch-all target: route all unknown targets to Sphinx using the new # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). %: Makefile @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) polyline-2.0.0/docs/conf.py000066400000000000000000000050301436234260400155750ustar00rootroot00000000000000# Configuration file for the Sphinx documentation builder. # # This file only contains a selection of the most common options. 
For a full # list see the documentation: # https://www.sphinx-doc.org/en/master/usage/configuration.html # -- Path setup -------------------------------------------------------------- # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. # import os import sys sys.path.insert(0, os.path.abspath('../src')) # -- Project information ----------------------------------------------------- # The name and version are retrieved from ``pyproject.toml`` in the root # directory. import toml with open('../pyproject.toml') as pyproject_file: pyproject_data = toml.load(pyproject_file) project = pyproject_data['project']['name'] version = pyproject_data['project']['version'] release = version # The copyright year and holder information is retrieved from the # ``LICENSE`` file. import re with open('../LICENSE', 'r') as license_file: license_string = license_file.read().split('Copyright (c) ')[1] year = license_string[:4] author = license_string[5:].split('\n')[0] copyright = year + ', ' + re.sub(r"\.$", "", author) # Period already in HTML. # -- General configuration --------------------------------------------------- # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. extensions = [ 'sphinx.ext.autodoc', 'sphinx.ext.doctest', 'sphinx.ext.napoleon', 'sphinx.ext.intersphinx', 'sphinx.ext.viewcode' ] # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. # This pattern also affects html_static_path and html_extra_path. exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store'] # -- Options for HTML output ------------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. # html_theme = 'sphinx_rtd_theme' # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". html_static_path = ['_static'] polyline-2.0.0/docs/index.rst000066400000000000000000000000331436234260400161350ustar00rootroot00000000000000.. include:: ../README.rst polyline-2.0.0/docs/make.bat000066400000000000000000000013751436234260400157130ustar00rootroot00000000000000@ECHO OFF pushd %~dp0 REM Command file for Sphinx documentation if "%SPHINXBUILD%" == "" ( set SPHINXBUILD=sphinx-build ) set SOURCEDIR=. set BUILDDIR=_build if "%1" == "" goto help %SPHINXBUILD% >NUL 2>NUL if errorlevel 9009 ( echo. echo.The 'sphinx-build' command was not found. Make sure you have Sphinx echo.installed, then set the SPHINXBUILD environment variable to point echo.to the full path of the 'sphinx-build' executable. Alternatively you echo.may add the Sphinx directory to PATH. echo. 
	echo.If you don't have Sphinx installed, grab it from
	echo.https://www.sphinx-doc.org/
	exit /b 1
)

%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
goto end

:help
%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%

:end
popd

polyline-2.0.0/pyproject.toml

[project]
name = "polyline"
version = "2.0.0"
description = """
A Python implementation of Google's Encoded Polyline Algorithm Format.
"""
license = {file = "LICENSE"}
authors = [
    {name = "Frederick Jansen"},
    {name = "Bruno M. Custódio"},
]
readme = "README.rst"
requires-python = ">=3.7"
dependencies = [
]

[project.urls]
homepage = "https://github.com/frederickjansen/polyline"
repository = "https://github.com/frederickjansen/polyline"

[project.optional-dependencies]
dev = [
    "pytest~=7.0",
    "pytest-cov~=4.0",
    "pylint~=2.15.10",
    "sphinx~=4.2.0",
    "sphinx-rtd-theme~=1.0.0",
    "toml~=0.10.2",
]
publish = [
    "build~=0.8",
    "twine~=4.0"
]

[build-system]
requires = [
    "build~=0.8",
    "setuptools~=65.0",
    "wheel~=0.37"
]
build-backend = "setuptools.build_meta"

[tool.pytest.ini_options]
addopts = "--doctest-modules --ignore=docs --cov=polyline --cov-report term-missing"

polyline-2.0.0/src/
polyline-2.0.0/src/polyline/
polyline-2.0.0/src/polyline/__init__.py

"""
Interface for the Polyline library.
"""
from .polyline import decode, encode

__all__ = ['decode', 'encode']

polyline-2.0.0/src/polyline/polyline.py

"""
A Python implementation of Google's Encoded Polyline Algorithm Format.
"""
import io
import itertools
import math
from typing import List, Tuple


def _pcitr(iterable):
    return zip(iterable, itertools.islice(iterable, 1, None))


def _py2_round(x):
    # The polyline algorithm uses Python 2's way of rounding
    return int(math.copysign(math.floor(math.fabs(x) + 0.5), x))


def _write(output, curr_value, prev_value, factor):
    curr_value = _py2_round(curr_value * factor)
    prev_value = _py2_round(prev_value * factor)
    coord = curr_value - prev_value
    coord <<= 1
    coord = coord if coord >= 0 else ~coord

    while coord >= 0x20:
        output.write(chr((0x20 | (coord & 0x1f)) + 63))
        coord >>= 5

    output.write(chr(coord + 63))


def _trans(value, index):
    byte, result, shift = None, 0, 0
    comp = None

    while byte is None or byte >= 0x20:
        byte = ord(value[index]) - 63
        index += 1
        result |= (byte & 0x1f) << shift
        shift += 5
        comp = result & 1

    return ~(result >> 1) if comp else (result >> 1), index


def decode(expression: str, precision: int = 5, geojson: bool = False) -> List[Tuple[float, float]]:
    """
    Decode a polyline string into a set of coordinates.

    :param expression: Polyline string, e.g. 'u{~vFvyys@fS]'.
    :param precision: Precision of the encoded coordinates. Google Maps uses 5, OpenStreetMap uses 6.
        The default value is 5.
    :param geojson: Set output of tuples to (lon, lat), as per https://tools.ietf.org/html/rfc7946#section-3.1.1
    :return: List of coordinate tuples in (lat, lon) order, unless geojson is set to True.
    """
    coordinates, index, lat, lng, length, factor = [], 0, 0, 0, len(expression), float(10 ** precision)

    while index < length:
        lat_change, index = _trans(expression, index)
        lng_change, index = _trans(expression, index)
        lat += lat_change
        lng += lng_change
        coordinates.append((lat / factor, lng / factor))

    if geojson is True:
        coordinates = [t[::-1] for t in coordinates]

    return coordinates


def encode(coordinates: List[Tuple[float, float]], precision: int = 5, geojson: bool = False) -> str:
    """
    Encode a set of coordinates in a polyline string.

    :param coordinates: List of coordinate tuples, e.g. [(0, 0), (1, 0)]. Unless geojson is set to True,
        the order is expected to be (lat, lon).
    :param precision: Precision of the coordinates to encode. Google Maps uses 5, OpenStreetMap uses 6.
        The default value is 5.
    :param geojson: Set to True in order to encode (lon, lat) tuples.
    :return: The encoded polyline string.
    """
    if geojson is True:
        coordinates = [t[::-1] for t in coordinates]

    output, factor = io.StringIO(), int(10 ** precision)

    _write(output, coordinates[0][0], 0, factor)
    _write(output, coordinates[0][1], 0, factor)

    for prev, curr in _pcitr(coordinates):
        _write(output, curr[0], prev[0], factor)
        _write(output, curr[1], prev[1], factor)

    return output.getvalue()

polyline-2.0.0/tests/
polyline-2.0.0/tests/test_polyline.py

from random import uniform, randint
import time

import polyline


def test_decode_multiple_points():
    d = polyline.decode('gu`wFnfys@???nKgE??gE?????oK????fE??fE')
    assert d == [
        (40.641, -8.654),
        (40.641, -8.654),
        (40.641, -8.656),
        (40.642, -8.656),
        (40.642, -8.655),
        (40.642, -8.655),
        (40.642, -8.655),
        (40.642, -8.653),
        (40.642, -8.653),
        (40.642, -8.653),
        (40.641, -8.653),
        (40.641, -8.654)
    ]


def test_decode_multiple_points_precision():
    d = polyline.decode('_epolA~ieoOnF??~{Bo}@??o}@?????_|B????n}@??n}@', 6)
    assert d == [
        (40.64112, -8.654),
        (40.641, -8.654),
        (40.641, -8.656),
        (40.642, -8.656),
        (40.642, -8.655),
        (40.642, -8.655),
        (40.642, -8.655),
        (40.642, -8.653),
        (40.642, -8.653),
        (40.642, -8.653),
        (40.641, -8.653),
        (40.641, -8.654)
    ]


def test_decode_official_example():
    d = polyline.decode('_p~iF~ps|U_ulLnnqC_mqNvxq`@')
    assert d == [
        (38.500, -120.200),
        (40.700, -120.950),
        (43.252, -126.453)
    ]


def test_decode_geojson():
    d = polyline.decode('_p~iF~ps|U_ulLnnqC_mqNvxq`@', geojson=True)
    assert d == [
        (-120.200, 38.500),
        (-120.950, 40.700),
        (-126.453, 43.252)
    ]


def test_decode_official_example_precision():
    d = polyline.decode('_izlhA~rlgdF_{geC~ywl@_kwzCn`{nI', 6)
    assert d == [
        (38.500, -120.200),
        (40.700, -120.950),
        (43.252, -126.453)
    ]


def test_decode_single_point():
    d = polyline.decode('gu`wFf`ys@')
    assert d == [
        (40.641, -8.653)
    ]


def test_decode_single_point_precision():
    d = polyline.decode('o}oolAnkcoO', 6)
    assert d == [
        (40.641, -8.653)
    ]


def test_encode_multiple_points():
    e = polyline.encode([
        (40.641, -8.654),
        (40.641, -8.654),
        (40.641, -8.656),
        (40.642, -8.656),
        (40.642, -8.655),
        (40.642, -8.655),
        (40.642, -8.655),
        (40.642, -8.653),
        (40.642, -8.653),
        (40.642, -8.653),
        (40.641, -8.653),
        (40.641, -8.654)
    ])
    assert e == 'gu`wFnfys@???nKgE??gE?????oK????fE??fE'


def test_encode_multiple_points_precision():
    e = polyline.encode([
        (40.64112345, -8.654),
        (40.641, -8.654),
        (40.641, -8.656),
        (40.642, -8.656),
        (40.642, -8.655),
        (40.642, -8.655),
        (40.642, -8.655),
        (40.642, -8.653),
        (40.642, -8.653),
        (40.642, -8.653),
        (40.641, -8.653),
        (40.641, -8.654)
    ], 6)
    assert e == 'eepolA~ieoOtF??~{Bo}@??o}@?????_|B????n}@??n}@'


def test_encode_official_example():
    e = polyline.encode([
        (38.500, -120.200),
        (40.700, -120.950),
        (43.252, -126.453)
    ])
    assert e == '_p~iF~ps|U_ulLnnqC_mqNvxq`@'


def test_encode_geojson():
    e = polyline.encode([
        (-120.200, 38.500),
        (-120.950, 40.700),
        (-126.453, 43.252)
    ], geojson=True)
    assert e == '_p~iF~ps|U_ulLnnqC_mqNvxq`@'


def test_encode_official_example_precision():
    e = polyline.encode([
        (38.500, -120.200),
        (40.700, -120.950),
        (43.252, -126.453)
    ], 6)
    assert e == '_izlhA~rlgdF_{geC~ywl@_kwzCn`{nI'


def test_encode_single_point():
    e = polyline.encode([
        (40.64155, -8.65344)
    ])
    assert e == 'ux`wF~bys@'

    e = polyline.encode([
        (40.641552, -8.653441)
    ])
    assert e == 'ux`wF~bys@'


def test_encode_single_point_rounding():
    e = polyline.encode([
        (0, 0.000006),
        (0, 0.000002)
    ])
    assert e == '?A?@'


def test_rounding_py3_match_py2():
    e = polyline.encode([
        (36.05322, -112.084004),
        (36.053573, -112.083914),
        (36.053845, -112.083965)])
    assert e == 'ss`{E~kbkTeAQw@J'


def test_encode_single_point_precision():
    e = polyline.encode([
        (40.641123, -8.653321)
    ], 6)
    assert e == 'eepolAp_doO'

    e = polyline.encode([
        (40.6411233123, -8.6533214234)
    ], 6)
    assert e == 'eepolAp_doO'


def test_a_variety_of_precisions():
    """uses a generator to create a variety of lat-lon's across the global
    and tests a range of precision settings from 4 to 8"""
    def generator():
        while True:
            coords = []
            for i in range(2, randint(4, 10)):
                lat, lon = uniform(-180.0, 180.0), uniform(-180.0, 180.0)
                coords.append((lat, lon))
            yield coords

    patience = 3  # seconds.
    waypoints, okays = 0, 0

    g = generator()
    start = time.time()
    while time.time() < start + patience:
        precision = randint(4, 8)
        wp = next(g)
        waypoints += len(wp)
        poly = polyline.encode(wp, precision)
        wp2 = polyline.decode(poly, precision)
        if wp == wp2:
            okays += len(wp2)
        else:
            for idx, _ in enumerate(wp):
                dx, dy = abs(wp[idx][0] - wp2[idx][0]), abs(wp[idx][1] - wp2[idx][1])
                if dx > 10 ** -(precision - 1) or dy > 10 ** -(precision - 1):
                    print(f"idx={idx}, dx={dx}, dy={dy}")
                else:
                    okays += 1

    assert okays == waypoints
    print(
        f"encoded and decoded {100 * okays / float(waypoints):.2f}% correctly for {waypoints} "
        f"waypoints @ {round(waypoints / patience, 0)} wp/sec")
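

def test_round_trip_geojson():
    # Illustrative addition, not part of the original suite: a round-trip sketch
    # reusing the (lon, lat) coordinates already exercised by test_encode_geojson
    # and test_decode_geojson. Coordinates that are exact at the default precision
    # of 5 should come back unchanged when both calls pass geojson=True.
    coords = [
        (-120.200, 38.500),
        (-120.950, 40.700),
        (-126.453, 43.252)
    ]
    assert polyline.decode(polyline.encode(coords, geojson=True), geojson=True) == coords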