pax_global_header00006660000000000000000000000064141544064610014517gustar00rootroot0000000000000052 comment=5f2cb59a0c99ad27643e2615cbf6fd4977e6c3c0 pytroll-schedule-0.6.0/000077500000000000000000000000001415440646100150215ustar00rootroot00000000000000pytroll-schedule-0.6.0/.bumpversion.cfg000066400000000000000000000001521415440646100201270ustar00rootroot00000000000000[bumpversion] current_version = 0.5.0 commit = True tag = True [bumpversion:file:trollsched/version.py] pytroll-schedule-0.6.0/.gitattributes000066400000000000000000000000431415440646100177110ustar00rootroot00000000000000trollsched/version.py export-subst pytroll-schedule-0.6.0/.gitchangelog.rc000066400000000000000000000144101415440646100200600ustar00rootroot00000000000000## ## Format ## ## ACTION: [AUDIENCE:] COMMIT_MSG [!TAG ...] ## ## Description ## ## ACTION is one of 'chg', 'fix', 'new' ## ## Is WHAT the change is about. ## ## 'chg' is for refactor, small improvement, cosmetic changes... ## 'fix' is for bug fixes ## 'new' is for new features, big improvement ## ## AUDIENCE is optional and one of 'dev', 'usr', 'pkg', 'test', 'doc' ## ## Is WHO is concerned by the change. ## ## 'dev' is for developpers (API changes, refactors...) ## 'usr' is for final users (UI changes) ## 'pkg' is for packagers (packaging changes) ## 'test' is for testers (test only related changes) ## 'doc' is for doc guys (doc only changes) ## ## COMMIT_MSG is ... well ... the commit message itself. ## ## TAGs are additionnal adjective as 'refactor' 'minor' 'cosmetic' ## ## They are preceded with a '!' or a '@' (prefer the former, as the ## latter is wrongly interpreted in github.) Commonly used tags are: ## ## 'refactor' is obviously for refactoring code only ## 'minor' is for a very meaningless change (a typo, adding a comment) ## 'cosmetic' is for cosmetic driven change (re-indentation, 80-col...) ## 'wip' is for partial functionality but complete subfunctionality. 
## ## Example: ## ## new: usr: support of bazaar implemented ## chg: re-indentend some lines !cosmetic ## new: dev: updated code to be compatible with last version of killer lib. ## fix: pkg: updated year of licence coverage. ## new: test: added a bunch of test around user usability of feature X. ## fix: typo in spelling my name in comment. !minor ## ## Please note that multi-line commit message are supported, and only the ## first line will be considered as the "summary" of the commit message. So ## tags, and other rules only applies to the summary. The body of the commit ## message will be displayed in the changelog without reformatting. ## ## ``ignore_regexps`` is a line of regexps ## ## Any commit having its full commit message matching any regexp listed here ## will be ignored and won't be reported in the changelog. ## ignore_regexps = [ r'@minor', r'!minor', r'@cosmetic', r'!cosmetic', r'@refactor', r'!refactor', r'@wip', r'!wip', r'^Merge commit .* into HEAD', r'^([cC]hg|[fF]ix|[nN]ew)\s*:\s*[p|P]kg:', r'^([cC]hg|[fF]ix|[nN]ew)\s*:\s*[d|D]ev:', r'^(.{3,3}\s*:)?\s*[fF]irst commit.?\s*$', ] ## ``section_regexps`` is a list of 2-tuples associating a string label and a ## list of regexp ## ## Commit messages will be classified in sections thanks to this. Section ## titles are the label, and a commit is classified under this section if any ## of the regexps associated is matching. ## section_regexps = [ ('New', [ r'^[nN]ew\s*:\s*((dev|use?r|pkg|test|doc)\s*:\s*)?([^\n]*)$', ]), ('Changes', [ r'^[cC]hg\s*:\s*((dev|use?r|pkg|test|doc)\s*:\s*)?([^\n]*)$', ]), ('Fix', [ r'^([Bb]ug)?[fF]ix\s*:\s*((dev|use?r|pkg|test|doc)\s*:\s*)?([^\n]*)$', ]), ('Other', None ## Match all lines ), ] ## ``body_process`` is a callable ## ## This callable will be given the original body and result will ## be used in the changelog. ## ## Available constructs are: ## ## - any python callable that take one txt argument and return txt argument. 
## ## - ReSub(pattern, replacement): will apply regexp substitution. ## ## - Indent(chars=" "): will indent the text with the prefix ## Please remember that template engines gets also to modify the text and ## will usually indent themselves the text if needed. ##git log --pretty=format:"- %s%n%b" --since="$(git show -s --format=%ad `git rev-list --tags --max-count=1`)" ## - Wrap(regexp=r"\n\n"): re-wrap text in separate paragraph to fill 80-Columns ## ## - noop: do nothing ## ## - ucfirst: ensure the first letter is uppercase. ## (usually used in the ``subject_process`` pipeline) ## ## - final_dot: ensure text finishes with a dot ## (usually used in the ``subject_process`` pipeline) ## ## - strip: remove any spaces before or after the content of the string ## ## Additionally, you can `pipe` the provided filters, for instance: #body_process = Wrap(regexp=r'\n(?=\w+\s*:)') | Indent(chars=" ") #body_process = Wrap(regexp=r'\n(?=\w+\s*:)') #body_process = noop body_process = ReSub(r'(?m)\s*^Signed-off-by: .*$\s*', '') ## ``subject_process`` is a callable ## ## This callable will be given the original subject and result will ## be used in the changelog. ## ## Available constructs are those listed in ``body_process`` doc. subject_process = (strip | ReSub(r'^([cC]hg|[fF]ix|[nN]ew)\s*:\s*((dev|use?r|pkg|test|doc)\s*:\s*)?([^\n@]*)(@[a-z]+\s+)*$', r'\4') | ucfirst | final_dot) ## ``tag_filter_regexp`` is a regexp ## ## Tags that will be used for the changelog must match this regexp. ## tag_filter_regexp = r'^v[0-9]+\.[0-9]+(\.[0-9]+)?$' ## ``unreleased_version_label`` is a string ## ## This label will be used as the changelog Title of the last set of changes ## between last valid tag and HEAD if any. 
#unreleased_version_label = "%%version%% (unreleased)" unreleased_version_label = "Unreleased" ## ``output_engine`` is a callable ## ## This will change the output format of the generated changelog file ## ## Available choices are: ## ## - rest_py ## ## Legacy pure python engine, outputs ReSTructured text. ## This is the default. ## ## - mustache() ## ## Template name could be any of the available templates in ## ``templates/mustache/*.tpl``. ## Requires python package ``pystache``. ## Examples: ## - mustache("markdown") ## - mustache("restructuredtext") ## ## - makotemplate() ## ## Template name could be any of the available templates in ## ``templates/mako/*.tpl``. ## Requires python package ``mako``. ## Examples: ## - makotemplate("restructuredtext") ## output_engine = rest_py #output_engine = mustache("restructuredtext") #output_engine = mustache("markdown") #output_engine = makotemplate("restructuredtext") ## ``include_merges`` is a boolean ## ## This option tells git-log whether to include merge commits in the log. ## The default is to include them. include_merges = False pytroll-schedule-0.6.0/.github/000077500000000000000000000000001415440646100163615ustar00rootroot00000000000000pytroll-schedule-0.6.0/.github/ISSUE_TEMPLATE.md000066400000000000000000000006441415440646100210720ustar00rootroot00000000000000#### Code Sample, a minimal, complete, and verifiable piece of code ```python # Your code here ``` #### Problem description [this should also explain **why** the current behaviour is a problem and why the expected output is a better solution.] #### Expected Output #### Actual Result, Traceback if applicable #### Versions of Python, package at hand and relevant dependencies Thank you for reporting an issue ! 
pytroll-schedule-0.6.0/.github/PULL_REQUEST_TEMPLATE.md000066400000000000000000000011301415440646100221550ustar00rootroot00000000000000 - [ ] Closes #xxxx - [ ] Tests added - [ ] Tests passed - [ ] Passes ``git diff origin/main **/*py | flake8 --diff`` - [ ] Fully documented pytroll-schedule-0.6.0/.github/workflows/000077500000000000000000000000001415440646100204165ustar00rootroot00000000000000pytroll-schedule-0.6.0/.github/workflows/ci.yaml000066400000000000000000000016641415440646100217040ustar00rootroot00000000000000name: Run tests on: - push - pull_request jobs: build: runs-on: ubuntu-latest strategy: fail-fast: true matrix: python-version: ["3.8", "3.9", "3.10"] experimental: [false] steps: - name: Checkout source uses: actions/checkout@v2 - name: Set up Python ${{ matrix.python-version }} uses: actions/setup-python@v2 with: python-version: ${{ matrix.python-version }} - name: Install dependencies run: | pip install -U pytest pytest-cov numpy pyresample pyorbital six pyyaml - name: Install pytroll-collectors run: | pip install --no-deps -e . 
- name: Run tests run: | pytest --cov=trollsched trollsched/tests --cov-report=xml - name: Upload coverage to Codecov uses: codecov/codecov-action@v1 with: file: ./coverage.xml env_vars: PYTHON_VERSION pytroll-schedule-0.6.0/.gitignore000066400000000000000000000004731415440646100170150ustar00rootroot00000000000000*.py[cod] # C extensions *.so # Packages *.egg *.egg-info dist build eggs parts bin var sdist develop-eggs .installed.cfg lib lib64 __pycache__ # Installer logs pip-log.txt # Unit test / coverage reports .coverage .tox nosetests.xml # Translations *.mo # Mr Developer .mr.developer.cfg .project .pydevproject pytroll-schedule-0.6.0/.stickler.yml000066400000000000000000000002461415440646100174440ustar00rootroot00000000000000linters: flake8: python: 3 config: setup.cfg fixer: true fixers: enable: true files: ignore: - 'docs/Makefile' - 'docs/make.bat' pytroll-schedule-0.6.0/.travis.yml000066400000000000000000000032071415440646100171340ustar00rootroot00000000000000language: python env: global: # Set defaults to avoid repeating in most cases - PYTHON_VERSION=$TRAVIS_PYTHON_VERSION - NUMPY_VERSION=stable - MAIN_CMD='python setup.py' - CONDA_DEPENDENCIES='sphinx cartopy scipy coveralls coverage codecov behave mock pycoast pydecorate six appdirs pykdtree pyresample docutils pyyaml matplotlib' - PIP_DEPENDENCIES='' - SETUP_XVFB=False - EVENT_TYPE='push pull_request cron' - SETUP_CMD='test' - CONDA_CHANNELS='conda-forge' - CONDA_CHANNEL_PRIORITY='strict' matrix: include: - env: PYTHON_VERSION=2.7 os: linux - env: PYTHON_VERSION=2.7 os: osx language: generic - env: PYTHON_VERSION=3.6 os: linux - env: PYTHON_VERSION=3.6 os: osx language: generic install: #- git clone --depth 1 git://github.com/astropy/ci-helpers.git - git clone --depth 1 -b all-the-fixes git://github.com/djhoese/ci-helpers.git - source ci-helpers/travis/setup_conda.sh # reactivate environment to set proj environment variables #- conda deactivate #- conda activate test script: coverage run 
--source=trollsched setup.py test after_success: - if [[ $PYTHON_VERSION == 3.6 ]]; then coveralls; fi # deploy: - provider: pypi user: adybbroe password: secure: SY0qo7sZXDjDx0DHvuXrHvL9VTAulgU/T33d6UWXf469jT9DOexuZ2VYLgJbYQen5FSe5JmQE0ZMdId1cb8IPP/77qCgQK6f0lRDa43fSYXhcD+fHzlQskievJrwamkRYx6WBrJbwGAKBNinUgNSaTdbh9XUugziGFiOHUfVppM= distributions: sdist bdist_wheel skip_existing: true on: tags: true repo: pytroll/pytroll-schedule notifications: slack: rooms: - pytroll:96mNSYSI1dBjGyzVXkBT6qFt#pytroll-schedule pytroll-schedule-0.6.0/CHANGELOG.md000066400000000000000000000131101415440646100166260ustar00rootroot00000000000000## Version 0.6.0 (2021/12/09) ### Issues Closed * [Issue 62](https://github.com/pytroll/pytroll-schedule/issues/62) - Remove remnants of Python 2 support ([PR 67](https://github.com/pytroll/pytroll-schedule/pull/67) by [@pnuu](https://github.com/pnuu)) * [Issue 60](https://github.com/pytroll/pytroll-schedule/issues/60) - Deprecated import of Mapping * [Issue 59](https://github.com/pytroll/pytroll-schedule/issues/59) - Failures in Schedule tests ([PR 61](https://github.com/pytroll/pytroll-schedule/pull/61) by [@pnuu](https://github.com/pnuu)) * [Issue 54](https://github.com/pytroll/pytroll-schedule/issues/54) - Deprecated use of abstract base classes ([PR 57](https://github.com/pytroll/pytroll-schedule/pull/57) by [@pnuu](https://github.com/pnuu)) * [Issue 53](https://github.com/pytroll/pytroll-schedule/issues/53) - The unittests are not run automatically ([PR 55](https://github.com/pytroll/pytroll-schedule/pull/55) by [@pnuu](https://github.com/pnuu)) * [Issue 52](https://github.com/pytroll/pytroll-schedule/issues/52) - Boundary calculations are broken ([PR 56](https://github.com/pytroll/pytroll-schedule/pull/56) by [@pnuu](https://github.com/pnuu)) * [Issue 49](https://github.com/pytroll/pytroll-schedule/issues/49) - Three unit tests failed. In this release 7 issues were closed. 
### Pull Requests Merged #### Bugs fixed * [PR 61](https://github.com/pytroll/pytroll-schedule/pull/61) - Allow `mersi-2` as instrument name ([59](https://github.com/pytroll/pytroll-schedule/issues/59)) * [PR 56](https://github.com/pytroll/pytroll-schedule/pull/56) - Remove a bug introduced in PR38 ([52](https://github.com/pytroll/pytroll-schedule/issues/52)) * [PR 51](https://github.com/pytroll/pytroll-schedule/pull/51) - Remove some redundant code and fix a failed unit test. * [PR 45](https://github.com/pytroll/pytroll-schedule/pull/45) - Use recent ssl protocol for older python versions * [PR 38](https://github.com/pytroll/pytroll-schedule/pull/38) - Fix S3 olci scan duration #### Features added * [PR 67](https://github.com/pytroll/pytroll-schedule/pull/67) - Refactor remove legacy code support ([62](https://github.com/pytroll/pytroll-schedule/issues/62)) * [PR 66](https://github.com/pytroll/pytroll-schedule/pull/66) - Change tested Python versions to 3.8, 3.9 and 3.10 * [PR 64](https://github.com/pytroll/pytroll-schedule/pull/64) - Use safe loading for YAML config file * [PR 61](https://github.com/pytroll/pytroll-schedule/pull/61) - Allow `mersi-2` as instrument name ([59](https://github.com/pytroll/pytroll-schedule/issues/59)) * [PR 58](https://github.com/pytroll/pytroll-schedule/pull/58) - Fix a test failure on Python 3.7 * [PR 57](https://github.com/pytroll/pytroll-schedule/pull/57) - Fix an import raising deprecation warning ([54](https://github.com/pytroll/pytroll-schedule/issues/54)) * [PR 55](https://github.com/pytroll/pytroll-schedule/pull/55) - Add GitHub actions to run unittests ([53](https://github.com/pytroll/pytroll-schedule/issues/53)) * [PR 50](https://github.com/pytroll/pytroll-schedule/pull/50) - Add a southern hemisphere pass test. 
* [PR 46](https://github.com/pytroll/pytroll-schedule/pull/46) - Give the option to plot multiple polygons * [PR 45](https://github.com/pytroll/pytroll-schedule/pull/45) - Use recent ssl protocol for older python versions * [PR 44](https://github.com/pytroll/pytroll-schedule/pull/44) - Make plot filename more complete, including the instrument name * [PR 42](https://github.com/pytroll/pytroll-schedule/pull/42) - Make it possible to tell cartopy to use offline shapefiles * [PR 41](https://github.com/pytroll/pytroll-schedule/pull/41) - Fix nasa ftp retrieval * [PR 38](https://github.com/pytroll/pytroll-schedule/pull/38) - Fix S3 olci scan duration In this release 19 pull requests were closed. ## Version 0.5.2 (2019/03/19) ### Pull Requests Merged #### Bugs fixed * [PR 36](https://github.com/pytroll/pytroll-schedule/pull/36) - Add xarray to conda dependencies * [PR 35](https://github.com/pytroll/pytroll-schedule/pull/35) - Bugfix - when a set of sensors are provided choose avhrr if it is one of them * [PR 33](https://github.com/pytroll/pytroll-schedule/pull/33) - Bugfix avhrr naming In this release 3 pull requests were closed. ## Version 0.5.1 (2019/01/08) ### Issues Closed * [Issue 27](https://github.com/pytroll/pytroll-schedule/issues/27) - Drawing the ascat outline * [Issue 25](https://github.com/pytroll/pytroll-schedule/issues/25) - New version slower in generating the schedule ([PR 26](https://github.com/pytroll/pytroll-schedule/pull/26)) In this release 2 issues were closed. 
### Pull Requests Merged #### Bugs fixed * [PR 26](https://github.com/pytroll/pytroll-schedule/pull/26) - Speed up schedule generation ([25](https://github.com/pytroll/pytroll-schedule/issues/25)) * [PR 24](https://github.com/pytroll/pytroll-schedule/pull/24) - Bugfix schedule generation #### Features added * [PR 29](https://github.com/pytroll/pytroll-schedule/pull/29) - Move save_fig import in under the save function * [PR 28](https://github.com/pytroll/pytroll-schedule/pull/28) - Restructure the get_next_passes putting some of the highly nested cod… * [PR 26](https://github.com/pytroll/pytroll-schedule/pull/26) - Speed up schedule generation ([25](https://github.com/pytroll/pytroll-schedule/issues/25)) * [PR 23](https://github.com/pytroll/pytroll-schedule/pull/23) - Versioneer In this release 6 pull requests were closed. ## Version 0.5.0 (2018/11/25) ### Issues Closed * [Issue 17](https://github.com/pytroll/pytroll-schedule/issues/17) - Plotting facility uses basemap ([PR 18](https://github.com/pytroll/pytroll-schedule/pull/18)) In this release 1 issue was closed. pytroll-schedule-0.6.0/LICENSE000066400000000000000000001044611415440646100160340ustar00rootroot00000000000000GNU GENERAL PUBLIC LICENSE Version 3, 29 June 2007 Copyright (C) 2007 Free Software Foundation, Inc. Everyone is permitted to copy and distribute verbatim copies of this license document, but changing it is not allowed. Preamble The GNU General Public License is a free, copyleft license for software and other kinds of works. The licenses for most software and other practical works are designed to take away your freedom to share and change the works. By contrast, the GNU General Public License is intended to guarantee your freedom to share and change all versions of a program--to make sure it remains free software for all its users. We, the Free Software Foundation, use the GNU General Public License for most of our software; it applies also to any other work released this way by its authors. 
You can apply it to your programs, too. When we speak of free software, we are referring to freedom, not price. Our General Public Licenses are designed to make sure that you have the freedom to distribute copies of free software (and charge for them if you wish), that you receive source code or can get it if you want it, that you can change the software or use pieces of it in new free programs, and that you know you can do these things. To protect your rights, we need to prevent others from denying you these rights or asking you to surrender the rights. Therefore, you have certain responsibilities if you distribute copies of the software, or if you modify it: responsibilities to respect the freedom of others. For example, if you distribute copies of such a program, whether gratis or for a fee, you must pass on to the recipients the same freedoms that you received. You must make sure that they, too, receive or can get the source code. And you must show them these terms so they know their rights. Developers that use the GNU GPL protect your rights with two steps: (1) assert copyright on the software, and (2) offer you this License giving you legal permission to copy, distribute and/or modify it. For the developers' and authors' protection, the GPL clearly explains that there is no warranty for this free software. For both users' and authors' sake, the GPL requires that modified versions be marked as changed, so that their problems will not be attributed erroneously to authors of previous versions. Some devices are designed to deny users access to install or run modified versions of the software inside them, although the manufacturer can do so. This is fundamentally incompatible with the aim of protecting users' freedom to change the software. The systematic pattern of such abuse occurs in the area of products for individuals to use, which is precisely where it is most unacceptable. 
Therefore, we have designed this version of the GPL to prohibit the practice for those products. If such problems arise substantially in other domains, we stand ready to extend this provision to those domains in future versions of the GPL, as needed to protect the freedom of users. Finally, every program is threatened constantly by software patents. States should not allow patents to restrict development and use of software on general-purpose computers, but in those that do, we wish to avoid the special danger that patents applied to a free program could make it effectively proprietary. To prevent this, the GPL assures that patents cannot be used to render the program non-free. The precise terms and conditions for copying, distribution and modification follow. TERMS AND CONDITIONS 0. Definitions. "This License" refers to version 3 of the GNU General Public License. "Copyright" also means copyright-like laws that apply to other kinds of works, such as semiconductor masks. "The Program" refers to any copyrightable work licensed under this License. Each licensee is addressed as "you". "Licensees" and "recipients" may be individuals or organizations. To "modify" a work means to copy from or adapt all or part of the work in a fashion requiring copyright permission, other than the making of an exact copy. The resulting work is called a "modified version" of the earlier work or a work "based on" the earlier work. A "covered work" means either the unmodified Program or a work based on the Program. To "propagate" a work means to do anything with it that, without permission, would make you directly or secondarily liable for infringement under applicable copyright law, except executing it on a computer or modifying a private copy. Propagation includes copying, distribution (with or without modification), making available to the public, and in some countries other activities as well. 
To "convey" a work means any kind of propagation that enables other parties to make or receive copies. Mere interaction with a user through a computer network, with no transfer of a copy, is not conveying. An interactive user interface displays "Appropriate Legal Notices" to the extent that it includes a convenient and prominently visible feature that (1) displays an appropriate copyright notice, and (2) tells the user that there is no warranty for the work (except to the extent that warranties are provided), that licensees may convey the work under this License, and how to view a copy of this License. If the interface presents a list of user commands or options, such as a menu, a prominent item in the list meets this criterion. 1. Source Code. The "source code" for a work means the preferred form of the work for making modifications to it. "Object code" means any non-source form of a work. A "Standard Interface" means an interface that either is an official standard defined by a recognized standards body, or, in the case of interfaces specified for a particular programming language, one that is widely used among developers working in that language. The "System Libraries" of an executable work include anything, other than the work as a whole, that (a) is included in the normal form of packaging a Major Component, but which is not part of that Major Component, and (b) serves only to enable use of the work with that Major Component, or to implement a Standard Interface for which an implementation is available to the public in source code form. A "Major Component", in this context, means a major essential component (kernel, window system, and so on) of the specific operating system (if any) on which the executable work runs, or a compiler used to produce the work, or an object code interpreter used to run it. 
The "Corresponding Source" for a work in object code form means all the source code needed to generate, install, and (for an executable work) run the object code and to modify the work, including scripts to control those activities. However, it does not include the work's System Libraries, or general-purpose tools or generally available free programs which are used unmodified in performing those activities but which are not part of the work. For example, Corresponding Source includes interface definition files associated with source files for the work, and the source code for shared libraries and dynamically linked subprograms that the work is specifically designed to require, such as by intimate data communication or control flow between those subprograms and other parts of the work. The Corresponding Source need not include anything that users can regenerate automatically from other parts of the Corresponding Source. The Corresponding Source for a work in source code form is that same work. 2. Basic Permissions. All rights granted under this License are granted for the term of copyright on the Program, and are irrevocable provided the stated conditions are met. This License explicitly affirms your unlimited permission to run the unmodified Program. The output from running a covered work is covered by this License only if the output, given its content, constitutes a covered work. This License acknowledges your rights of fair use or other equivalent, as provided by copyright law. You may make, run and propagate covered works that you do not convey, without conditions so long as your license otherwise remains in force. You may convey covered works to others for the sole purpose of having them make modifications exclusively for you, or provide you with facilities for running those works, provided that you comply with the terms of this License in conveying all material for which you do not control copyright. 
Those thus making or running the covered works for you must do so exclusively on your behalf, under your direction and control, on terms that prohibit them from making any copies of your copyrighted material outside their relationship with you. Conveying under any other circumstances is permitted solely under the conditions stated below. Sublicensing is not allowed; section 10 makes it unnecessary. 3. Protecting Users' Legal Rights From Anti-Circumvention Law. No covered work shall be deemed part of an effective technological measure under any applicable law fulfilling obligations under article 11 of the WIPO copyright treaty adopted on 20 December 1996, or similar laws prohibiting or restricting circumvention of such measures. When you convey a covered work, you waive any legal power to forbid circumvention of technological measures to the extent such circumvention is effected by exercising rights under this License with respect to the covered work, and you disclaim any intention to limit operation or modification of the work as a means of enforcing, against the work's users, your or third parties' legal rights to forbid circumvention of technological measures. 4. Conveying Verbatim Copies. You may convey verbatim copies of the Program's source code as you receive it, in any medium, provided that you conspicuously and appropriately publish on each copy an appropriate copyright notice; keep intact all notices stating that this License and any non-permissive terms added in accord with section 7 apply to the code; keep intact all notices of the absence of any warranty; and give all recipients a copy of this License along with the Program. You may charge any price or no price for each copy that you convey, and you may offer support or warranty protection for a fee. 5. Conveying Modified Source Versions. 
You may convey a work based on the Program, or the modifications to produce it from the Program, in the form of source code under the terms of section 4, provided that you also meet all of these conditions: a) The work must carry prominent notices stating that you modified it, and giving a relevant date. b) The work must carry prominent notices stating that it is released under this License and any conditions added under section 7. This requirement modifies the requirement in section 4 to "keep intact all notices". c) You must license the entire work, as a whole, under this License to anyone who comes into possession of a copy. This License will therefore apply, along with any applicable section 7 additional terms, to the whole of the work, and all its parts, regardless of how they are packaged. This License gives no permission to license the work in any other way, but it does not invalidate such permission if you have separately received it. d) If the work has interactive user interfaces, each must display Appropriate Legal Notices; however, if the Program has interactive interfaces that do not display Appropriate Legal Notices, your work need not make them do so. A compilation of a covered work with other separate and independent works, which are not by their nature extensions of the covered work, and which are not combined with it such as to form a larger program, in or on a volume of a storage or distribution medium, is called an "aggregate" if the compilation and its resulting copyright are not used to limit the access or legal rights of the compilation's users beyond what the individual works permit. Inclusion of a covered work in an aggregate does not cause this License to apply to the other parts of the aggregate. 6. Conveying Non-Source Forms. 
You may convey a covered work in object code form under the terms of sections 4 and 5, provided that you also convey the machine-readable Corresponding Source under the terms of this License, in one of these ways: a) Convey the object code in, or embodied in, a physical product (including a physical distribution medium), accompanied by the Corresponding Source fixed on a durable physical medium customarily used for software interchange. b) Convey the object code in, or embodied in, a physical product (including a physical distribution medium), accompanied by a written offer, valid for at least three years and valid for as long as you offer spare parts or customer support for that product model, to give anyone who possesses the object code either (1) a copy of the Corresponding Source for all the software in the product that is covered by this License, on a durable physical medium customarily used for software interchange, for a price no more than your reasonable cost of physically performing this conveying of source, or (2) access to copy the Corresponding Source from a network server at no charge. c) Convey individual copies of the object code with a copy of the written offer to provide the Corresponding Source. This alternative is allowed only occasionally and noncommercially, and only if you received the object code with such an offer, in accord with subsection 6b. d) Convey the object code by offering access from a designated place (gratis or for a charge), and offer equivalent access to the Corresponding Source in the same way through the same place at no further charge. You need not require recipients to copy the Corresponding Source along with the object code. If the place to copy the object code is a network server, the Corresponding Source may be on a different server (operated by you or a third party) that supports equivalent copying facilities, provided you maintain clear directions next to the object code saying where to find the Corresponding Source. 
Regardless of what server hosts the Corresponding Source, you remain obligated to ensure that it is available for as long as needed to satisfy these requirements. e) Convey the object code using peer-to-peer transmission, provided you inform other peers where the object code and Corresponding Source of the work are being offered to the general public at no charge under subsection 6d. A separable portion of the object code, whose source code is excluded from the Corresponding Source as a System Library, need not be included in conveying the object code work. A "User Product" is either (1) a "consumer product", which means any tangible personal property which is normally used for personal, family, or household purposes, or (2) anything designed or sold for incorporation into a dwelling. In determining whether a product is a consumer product, doubtful cases shall be resolved in favor of coverage. For a particular product received by a particular user, "normally used" refers to a typical or common use of that class of product, regardless of the status of the particular user or of the way in which the particular user actually uses, or expects or is expected to use, the product. A product is a consumer product regardless of whether the product has substantial commercial, industrial or non-consumer uses, unless such uses represent the only significant mode of use of the product. "Installation Information" for a User Product means any methods, procedures, authorization keys, or other information required to install and execute modified versions of a covered work in that User Product from a modified version of its Corresponding Source. The information must suffice to ensure that the continued functioning of the modified object code is in no case prevented or interfered with solely because modification has been made. 
If you convey an object code work under this section in, or with, or specifically for use in, a User Product, and the conveying occurs as part of a transaction in which the right of possession and use of the User Product is transferred to the recipient in perpetuity or for a fixed term (regardless of how the transaction is characterized), the Corresponding Source conveyed under this section must be accompanied by the Installation Information. But this requirement does not apply if neither you nor any third party retains the ability to install modified object code on the User Product (for example, the work has been installed in ROM). The requirement to provide Installation Information does not include a requirement to continue to provide support service, warranty, or updates for a work that has been modified or installed by the recipient, or for the User Product in which it has been modified or installed. Access to a network may be denied when the modification itself materially and adversely affects the operation of the network or violates the rules and protocols for communication across the network. Corresponding Source conveyed, and Installation Information provided, in accord with this section must be in a format that is publicly documented (and with an implementation available to the public in source code form), and must require no special password or key for unpacking, reading or copying. 7. Additional Terms. "Additional permissions" are terms that supplement the terms of this License by making exceptions from one or more of its conditions. Additional permissions that are applicable to the entire Program shall be treated as though they were included in this License, to the extent that they are valid under applicable law. If additional permissions apply only to part of the Program, that part may be used separately under those permissions, but the entire Program remains governed by this License without regard to the additional permissions. 
When you convey a copy of a covered work, you may at your option remove any additional permissions from that copy, or from any part of it. (Additional permissions may be written to require their own removal in certain cases when you modify the work.) You may place additional permissions on material, added by you to a covered work, for which you have or can give appropriate copyright permission. Notwithstanding any other provision of this License, for material you add to a covered work, you may (if authorized by the copyright holders of that material) supplement the terms of this License with terms: a) Disclaiming warranty or limiting liability differently from the terms of sections 15 and 16 of this License; or b) Requiring preservation of specified reasonable legal notices or author attributions in that material or in the Appropriate Legal Notices displayed by works containing it; or c) Prohibiting misrepresentation of the origin of that material, or requiring that modified versions of such material be marked in reasonable ways as different from the original version; or d) Limiting the use for publicity purposes of names of licensors or authors of the material; or e) Declining to grant rights under trademark law for use of some trade names, trademarks, or service marks; or f) Requiring indemnification of licensors and authors of that material by anyone who conveys the material (or modified versions of it) with contractual assumptions of liability to the recipient, for any liability that these contractual assumptions directly impose on those licensors and authors. All other non-permissive additional terms are considered "further restrictions" within the meaning of section 10. If the Program as you received it, or any part of it, contains a notice stating that it is governed by this License along with a term that is a further restriction, you may remove that term. 
If a license document contains a further restriction but permits relicensing or conveying under this License, you may add to a covered work material governed by the terms of that license document, provided that the further restriction does not survive such relicensing or conveying. If you add terms to a covered work in accord with this section, you must place, in the relevant source files, a statement of the additional terms that apply to those files, or a notice indicating where to find the applicable terms. Additional terms, permissive or non-permissive, may be stated in the form of a separately written license, or stated as exceptions; the above requirements apply either way. 8. Termination. You may not propagate or modify a covered work except as expressly provided under this License. Any attempt otherwise to propagate or modify it is void, and will automatically terminate your rights under this License (including any patent licenses granted under the third paragraph of section 11). However, if you cease all violation of this License, then your license from a particular copyright holder is reinstated (a) provisionally, unless and until the copyright holder explicitly and finally terminates your license, and (b) permanently, if the copyright holder fails to notify you of the violation by some reasonable means prior to 60 days after the cessation. Moreover, your license from a particular copyright holder is reinstated permanently if the copyright holder notifies you of the violation by some reasonable means, this is the first time you have received notice of violation of this License (for any work) from that copyright holder, and you cure the violation prior to 30 days after your receipt of the notice. Termination of your rights under this section does not terminate the licenses of parties who have received copies or rights from you under this License. 
If your rights have been terminated and not permanently reinstated, you do not qualify to receive new licenses for the same material under section 10. 9. Acceptance Not Required for Having Copies. You are not required to accept this License in order to receive or run a copy of the Program. Ancillary propagation of a covered work occurring solely as a consequence of using peer-to-peer transmission to receive a copy likewise does not require acceptance. However, nothing other than this License grants you permission to propagate or modify any covered work. These actions infringe copyright if you do not accept this License. Therefore, by modifying or propagating a covered work, you indicate your acceptance of this License to do so. 10. Automatic Licensing of Downstream Recipients. Each time you convey a covered work, the recipient automatically receives a license from the original licensors, to run, modify and propagate that work, subject to this License. You are not responsible for enforcing compliance by third parties with this License. An "entity transaction" is a transaction transferring control of an organization, or substantially all assets of one, or subdividing an organization, or merging organizations. If propagation of a covered work results from an entity transaction, each party to that transaction who receives a copy of the work also receives whatever licenses to the work the party's predecessor in interest had or could give under the previous paragraph, plus a right to possession of the Corresponding Source of the work from the predecessor in interest, if the predecessor has it or can get it with reasonable efforts. You may not impose any further restrictions on the exercise of the rights granted or affirmed under this License. 
For example, you may not impose a license fee, royalty, or other charge for exercise of rights granted under this License, and you may not initiate litigation (including a cross-claim or counterclaim in a lawsuit) alleging that any patent claim is infringed by making, using, selling, offering for sale, or importing the Program or any portion of it. 11. Patents. A "contributor" is a copyright holder who authorizes use under this License of the Program or a work on which the Program is based. The work thus licensed is called the contributor's "contributor version". A contributor's "essential patent claims" are all patent claims owned or controlled by the contributor, whether already acquired or hereafter acquired, that would be infringed by some manner, permitted by this License, of making, using, or selling its contributor version, but do not include claims that would be infringed only as a consequence of further modification of the contributor version. For purposes of this definition, "control" includes the right to grant patent sublicenses in a manner consistent with the requirements of this License. Each contributor grants you a non-exclusive, worldwide, royalty-free patent license under the contributor's essential patent claims, to make, use, sell, offer for sale, import and otherwise run, modify and propagate the contents of its contributor version. In the following three paragraphs, a "patent license" is any express agreement or commitment, however denominated, not to enforce a patent (such as an express permission to practice a patent or covenant not to sue for patent infringement). To "grant" such a patent license to a party means to make such an agreement or commitment not to enforce a patent against the party. 
If you convey a covered work, knowingly relying on a patent license, and the Corresponding Source of the work is not available for anyone to copy, free of charge and under the terms of this License, through a publicly available network server or other readily accessible means, then you must either (1) cause the Corresponding Source to be so available, or (2) arrange to deprive yourself of the benefit of the patent license for this particular work, or (3) arrange, in a manner consistent with the requirements of this License, to extend the patent license to downstream recipients. "Knowingly relying" means you have actual knowledge that, but for the patent license, your conveying the covered work in a country, or your recipient's use of the covered work in a country, would infringe one or more identifiable patents in that country that you have reason to believe are valid. If, pursuant to or in connection with a single transaction or arrangement, you convey, or propagate by procuring conveyance of, a covered work, and grant a patent license to some of the parties receiving the covered work authorizing them to use, propagate, modify or convey a specific copy of the covered work, then the patent license you grant is automatically extended to all recipients of the covered work and works based on it. A patent license is "discriminatory" if it does not include within the scope of its coverage, prohibits the exercise of, or is conditioned on the non-exercise of one or more of the rights that are specifically granted under this License. 
You may not convey a covered work if you are a party to an arrangement with a third party that is in the business of distributing software, under which you make payment to the third party based on the extent of your activity of conveying the work, and under which the third party grants, to any of the parties who would receive the covered work from you, a discriminatory patent license (a) in connection with copies of the covered work conveyed by you (or copies made from those copies), or (b) primarily for and in connection with specific products or compilations that contain the covered work, unless you entered into that arrangement, or that patent license was granted, prior to 28 March 2007. Nothing in this License shall be construed as excluding or limiting any implied license or other defenses to infringement that may otherwise be available to you under applicable patent law. 12. No Surrender of Others' Freedom. If conditions are imposed on you (whether by court order, agreement or otherwise) that contradict the conditions of this License, they do not excuse you from the conditions of this License. If you cannot convey a covered work so as to satisfy simultaneously your obligations under this License and any other pertinent obligations, then as a consequence you may not convey it at all. For example, if you agree to terms that obligate you to collect a royalty for further conveying from those to whom you convey the Program, the only way you could satisfy both those terms and this License would be to refrain entirely from conveying the Program. 13. Use with the GNU Affero General Public License. Notwithstanding any other provision of this License, you have permission to link or combine any covered work with a work licensed under version 3 of the GNU Affero General Public License into a single combined work, and to convey the resulting work. 
The terms of this License will continue to apply to the part which is the covered work, but the special requirements of the GNU Affero General Public License, section 13, concerning interaction through a network will apply to the combination as such. 14. Revised Versions of this License. The Free Software Foundation may publish revised and/or new versions of the GNU General Public License from time to time. Such new versions will be similar in spirit to the present version, but may differ in detail to address new problems or concerns. Each version is given a distinguishing version number. If the Program specifies that a certain numbered version of the GNU General Public License "or any later version" applies to it, you have the option of following the terms and conditions either of that numbered version or of any later version published by the Free Software Foundation. If the Program does not specify a version number of the GNU General Public License, you may choose any version ever published by the Free Software Foundation. If the Program specifies that a proxy can decide which future versions of the GNU General Public License can be used, that proxy's public statement of acceptance of a version permanently authorizes you to choose that version for the Program. Later license versions may give you additional or different permissions. However, no additional obligations are imposed on any author or copyright holder as a result of your choosing to follow a later version. 15. Disclaimer of Warranty. THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. 
SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION. 16. Limitation of Liability. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. 17. Interpretation of Sections 15 and 16. If the disclaimer of warranty and limitation of liability provided above cannot be given local legal effect according to their terms, reviewing courts shall apply local law that most closely approximates an absolute waiver of all civil liability in connection with the Program, unless a warranty or assumption of liability accompanies a copy of the Program in return for a fee. END OF TERMS AND CONDITIONS How to Apply These Terms to Your New Programs If you develop a new program, and you want it to be of the greatest possible use to the public, the best way to achieve this is to make it free software which everyone can redistribute and change under these terms. To do so, attach the following notices to the program. It is safest to attach them to the start of each source file to most effectively state the exclusion of warranty; and each file should have at least the "copyright" line and a pointer to where the full notice is found. 
{one line to give the program's name and a brief idea of what it does.} Copyright (C) {year} {name of author} This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see . Also add information on how to contact you by electronic and paper mail. If the program does terminal interaction, make it output a short notice like this when it starts in an interactive mode: {project} Copyright (C) {year} {fullname} This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'. This is free software, and you are welcome to redistribute it under certain conditions; type `show c' for details. The hypothetical commands `show w' and `show c' should show the appropriate parts of the General Public License. Of course, your program's commands might be different; for a GUI interface, you would use an "about box". You should also get your employer (if you work as a programmer) or school, if any, to sign a "copyright disclaimer" for the program, if necessary. For more information on this, and how to apply and follow the GNU GPL, see . The GNU General Public License does not permit incorporating your program into proprietary programs. If your program is a subroutine library, you may consider it more useful to permit linking proprietary applications with the library. If this is what you want to do, use the GNU Lesser General Public License instead of this License. But first, please read . 
pytroll-schedule-0.6.0/MANIFEST.in000066400000000000000000000000641415440646100165570ustar00rootroot00000000000000include versioneer.py include trollsched/version.py pytroll-schedule-0.6.0/README.md000066400000000000000000000016031415440646100163000ustar00rootroot00000000000000pytroll-schedule ================ [![Codacy Badge](https://api.codacy.com/project/badge/Grade/9f039d7d640846ca89be8a78fa11e1f6)](https://www.codacy.com/app/adybbroe/pytroll-schedule?utm_source=github.com&utm_medium=referral&utm_content=pytroll/pytroll-schedule&utm_campaign=badger) [![Build Status](https://travis-ci.org/pytroll/pytroll-schedule.png?branch=main)](https://travis-ci.org/pytroll/pytroll-schedule) [![Coverage Status](https://coveralls.io/repos/github/pytroll/pytroll-schedule/badge.svg?branch=main)](https://coveralls.io/github/pytroll/pytroll-schedule?branch=main) [![Code Health](https://landscape.io/github/pytroll/pytroll-schedule/main/landscape.png)](https://landscape.io/github/pytroll/pytroll-schedule/main) [![PyPI version](https://badge.fury.io/py/pytroll-schedule.svg)](https://badge.fury.io/py/pytroll-schedule) Reception scheduling of polar orbiting weather satellites pytroll-schedule-0.6.0/RELEASING.md000066400000000000000000000012021415440646100166470ustar00rootroot00000000000000# Releasing pytroll-schedule 1. checkout main branch 2. pull from repo 3. run the unittests 4. run `loghub` and update the `CHANGELOG.md` file: ``` loghub pytroll/pytroll-schedule -u -st v0.5.0 -plg bug "Bugs fixed" -plg enhancement "Features added" -plg documentation "Documentation changes" ``` Don't forget to commit! 5. Create a tag with the new version number, starting with a 'v', eg: ``` git tag v0.5.1 -m "Version 0.5.1" ``` See [semver.org](http://semver.org/) on how to write a version number. 6. push changes to github `git push --follow-tags` 7. 
Verify travis tests passed and deployed sdist and wheel to PyPI pytroll-schedule-0.6.0/changelog.rst000066400000000000000000000366451415440646100175200ustar00rootroot00000000000000Changelog ========= v0.4.0 (2018-11-08) ------------------- Fix ~~~ - Bugfix: Resolve import errors - a few things have been moved to pyresample. [Adam.Dybbroe] - Bugfix: setting the instrument dependent scan duration. [Adam.Dybbroe] - Bugfix: default instrument geometry function. [Adam.Dybbroe] - Bugfix: Instrument for Suomi NPP was not known. [Adam.Dybbroe] - Multi-proc only if stations>1 ; eval Aqua-dump with default URL. [Alexander Maul] Other ~~~~~ - Update changelog. [Martin Raspaud] - Bump version: 0.3.3 → 0.4.0. [Martin Raspaud] - Merge pull request #16 from pytroll/debug-satpass. [Martin Raspaud] Debug satpass - Make sure indecies are integers. [Adam.Dybbroe] - Fix boundary import from pyresample. [Martin Raspaud] - Merge branch 'debug-satpass' of github.com:pytroll/pytroll-schedule into debug-satpass. [Adam.Dybbroe] - Use pyresample's boundary module. [Martin Raspaud] - Add unittest module for satpass testing. [Adam.Dybbroe] - Add unittests for swath boundary and swath coverage. [Adam.Dybbroe] - Remove "v" from versioning. [Adam.Dybbroe] - Bugfix bottom and top lons,lats affecting at least VIIRS. [Adam.Dybbroe] - Remove log info instrument Signed-off-by: Adam.Dybbroe [Adam.Dybbroe] - Merge branch 'master' into debug-satpass. [Adam.Dybbroe] - Merge pull request #11 from pytroll/feature-python3-support. [Panu Lahtinen] Support Python 3 - Encapsulate direct call of the files inside main() function. [Panu Lahtinen] - Remove leftover rebase marker. [Panu Lahtinen] - Merge branch 'feature-python3-support' of https://github.com/mraspaud /pytroll-schedule into feature-python3-support. [Panu Lahtinen] - Merge branch 'feature-python3-support' of https://github.com/mraspaud /pytroll-schedule into feature-python3-support. [Panu Lahtinen] - Fix urlparse imports. 
[Panu Lahtinen] - Remove unused import. [Panu Lahtinen] - Support Python 3. [Panu Lahtinen] - Fix urlparse imports. [Panu Lahtinen] - Remove unused import. [Panu Lahtinen] - Support Python 3. [Panu Lahtinen] - Support Python 3. [Panu Lahtinen] - Fix urlparse imports. [Panu Lahtinen] - Remove unused import. [Panu Lahtinen] - Support Python 3. [Panu Lahtinen] - Bugfix checking instrument name. [Adam.Dybbroe] - Pep8. [Adam.Dybbroe] - Fix sec_scan_duration for avhrr and ascat. [Adam.Dybbroe] - Use variable scan_step instead of frequency. [Adam.Dybbroe] - Check if instrument is a list. [Adam.Dybbroe] - A list of instruments is not allowed. Set to avhrr for the time being. [Adam.Dybbroe] - Bugfix. [Adam.Dybbroe] - Add more debug info... [Adam.Dybbroe] - Add debug info. [Adam.Dybbroe] - Merge pull request #15 from pytroll/debug-schedule-page-generation. [Adam Dybbroe] Debug schedule page generation - Bugfix viirs and getting the sides right. [Adam.Dybbroe] - Add more debug info. [Adam.Dybbroe] - Fix exception message and add debug printouts. [Adam.Dybbroe] - Merge pull request #14 from pytroll/bugfix-snpp. [Adam Dybbroe] Bugfix: Instrument for Suomi NPP was not known. - Merge pull request #12 from pytroll/feature-ascat. [Adam Dybbroe] Add support for ASCAT scan geometry - Bugfix, set the correct instrument when creating the Pass object. [Adam.Dybbroe] - Fix for Python3 and add number of FOVs for mhs and amsua. [Adam.Dybbroe] - Generalize to other instruments different from AVHRR. [Adam.Dybbroe] - Add support for ASCAT scan geometry. [Adam.Dybbroe] - Merge pull request #10 from pytroll/develop. [Martin Raspaud] Get rid of develop - Merge pull request #9 from pytroll/feature-oo. [Martin Raspaud] [WIP] Factorize code using OOP - Adapt legacy cfg reader to use the new classes. [Alexander Maul] - Make more code working in Py2.7 & Py3.4+. [Alexander Maul] - Update documentation for config file (plain->yaml). 
[Alexander Maul] - Remove surplus module function and parameter, both now in classes. [Alexander Maul] - Use AQUA/TERRA dump URL from yaml-cfg, add use_2to3 to setup. [Alexander Maul] - Merge branch 'feature-oo' of https://github.com/pytroll/pytroll- schedule.git into feature-oo. [Alexander Maul] - Ammend yaml-config/reader to set sat-scores per station (optional). [Alexander Maul] - Ammend yaml-config/reader to set sat-scores per station (optional). [Alexander Maul] - Change print() statements to proper logging in exception handling. [Alexander Maul] - Change combined_stations() for oo-rewrite, correct some typos for py3. [Alexander Maul] - Fix circular import. [Martin Raspaud] - Add support for olci instrument. [Martin Raspaud] - Add python 3 support. [Martin Raspaud] - Restore string as satellite for pass instatiation. [Martin Raspaud] - Change default mail sender. [Martin Raspaud] - Create some classes and use them. [Martin Raspaud] - Let open generate an error if the config file is missing. [Martin Raspaud] - Add more option to the drawing feature. [Martin Raspaud] - Bugfix for metop-a/b and noaa-20. [Adam.Dybbroe] - Correct NOAA-20 naming for TLEs, in case JPSS-1 is used instead of "NOAA 20" [Adam.Dybbroe] - Bugfix - exclude satellites. [Adam.Dybbroe] - Allow to exclude satellite platforms via the command line. [Adam.Dybbroe] - Fix for NOAA 20 / JPSS-1 naming. [Adam.Dybbroe] - Handle situations more gracefully with satellites for which we have no TLEs. And prepare for NOAA-20. [Adam.Dybbroe] - Fix center_id for combined schedules. [Alexander Maul] - Merge pull request #8 from pytroll/yaml-config. [Alex Maul] Yaml config with requested changes. - Merge branch 'develop' into yaml-config. [Alex Maul] - Make center_id a configuration item. [Martin Raspaud] - Update usage in docs. [Martin Raspaud] - Merge branch 'develop' of github.com:mraspaud/pytroll-schedule into develop. [Martin Raspaud] - Allow passing proj info to the mapper. 
[Martin Raspaud] - Update with changes according to PR reviewer's comments. [Alexander Maul] Change key "center" to "center_id" yaml-config, move lines evaluating new keys up in trollsched.utils. - Move center-ID to yaml-config. [Alexander Maul] - Add "minimum pass duration" and "dump host URL" to yaml config file. [Alexander Maul] - Merge pull request #7 from pytroll/develop. [Alex Maul] Include last commits into branch yaml-config. - Fix filename filter for Aqua/Terra transponder-off info files. [Alexander Maul] - Merge branch 'develop' into develop. [Alex Maul] - Merge pull request #4 from pytroll/develop. [Alex Maul] Update fork - Increase estimate for the size of the combined passes' graph. [Alexander Maul] - Move code collecting labels per tree-node into local function. [Alexander Maul] - Remove obsolete "to-do" comment. [Alexander Maul] - Improve __eq__ in SimplePass. [Alexander Maul] Now, passes are compared by name and orbit number if both are instances of Pass. Otherwise the old comparision is used. - Add missing import. [Alexander Maul] - Merge pull request #2 from pytroll/develop. [Alex Maul] update develop-branch at alexmaul from pytroll - Update docs/gitignore. [Alexander Maul] - Add information on Direct Broadcast Downlink Scheduling restrictions for EOS and Metop. [Adam.Dybbroe] - Add YAML reader for configuration file. [Alexander Maul] Both the old config-format and yaml should be supported. Also add a template for a yaml configuration file. - Yaml-config reader: start work. [alexmaul] v0.3.3 (2017-09-20) ------------------- - Update changelog. [Martin Raspaud] - Bump version: 0.3.2 → 0.3.3. [Martin Raspaud] - Remove support for 2.6. [Martin Raspaud] - Add/change function to load/include info about TERRA dumps, in addition to AQUA dumps. [alexmaul] v0.3.2 (2017-08-18) ------------------- Fix ~~~ - Bugfix: Number of scans should be an integer. [Adam.Dybbroe] - Bugfix: Correct path to output png plots. [Adam.Dybbroe] Other ~~~~~ - Update changelog. 
[Martin Raspaud] - Bump version: 0.3.1 → 0.3.2. [Martin Raspaud] - Merge remote-tracking branch 'origin/generate-schedule-pages' into develop. [Martin Raspaud] - Use TLES environment varaible. [Adam.Dybbroe] - Clean up and make sat names look nice on plots. [Adam.Dybbroe] - Add a schedule generator runner based on posttroll messaging. [Adam.Dybbroe] - Fix int division for py3. [Martin Raspaud] - Fix unit test test_normalize() [Panu Lahtinen] - Add IASI scan angle to instrument swath boundary calculations. [Panu Lahtinen] - Merge pull request #4 from alexmaul/develop. [Martin Raspaud] Develop - First draft on scheduler documentation. [Alexander Maul] - Fix typo in send_file() [Alexander Maul] - Collected all styles in head/style and fixed font-sizes. [Alexander Maul] - Always save xml-file in request-mode. [Alexander Maul] Even if report-mode is set in command-line arguments an xml-file in request-mode is created. Also the combined-request files are transfered with FTP, which is encapsuled in a new function. - Use div with css-positioning instead of sturdy tables. [Alexander Maul] - Create XSL for display of the aquisition-report in a browser. [Alexander Maul] - Version-compatible dictionary building list-comprehension. [Alexander Maul] - Fix missing setter. [Alexander Maul] - Merge pull request #1 from pytroll/develop. [Alex Maul] Sync Develop to fork - Corrected last typos. [Alexander Maul] - Merge branch 'multiple_stations' into develop. [Alexander Maul] # Conflicts: # trollsched/schedule.py - All test work fine, prepare for PR. [Alexander Maul] - Passes handling and other re-works finished. [Alexander Maul] - Intermediate commit. [Alexander Maul] - Intermediate commit. combining three stations works. [Alexander Maul] - Last quirks removed ... intense testing follows. [Alexander Maul] - Intermediate commit to backup work. [Alexander Maul] - Intermediate commit. [Alexander Maul] it's working. generated path is indeed more optimal. 
- Combining weighted trees works so far. not ready for pull-req. [Alexander Maul] - Schedule.py : remove unneccessary writing to temp-file. [Alexander Maul] combine.py : just saving some development, nothing ready yet. - Most of the changes from workshop. Starts branch in own fork. [Alexander Maul] - Intergrate changes from WS. [Alexander Maul] - Index on develop: 40a9016 Add avoid_list feature. [Alexander Maul] v0.3.1 (2016-12-08) ------------------- - Update changelog. [Martin Raspaud] - Bump version: 0.3.0 → 0.3.1. [Martin Raspaud] - Bugfix in spherical. [Martin Raspaud] v0.3.0 (2016-10-27) ------------------- Changes ~~~~~~~ - Allow to pass a parameter to modpi. [Martin Raspaud] Fix ~~~ - Bugfix: Don't duplicate points in decimating boundaries. [Martin Raspaud] - Bugfix: rec flag cannot be bool for xml. [Martin Raspaud] - Bugfix: save rec status, not image link... [Martin Raspaud] Other ~~~~~ - Update changelog. [Martin Raspaud] - Bump version: 0.2.2 → 0.3.0. [Martin Raspaud] - Add bump and changelog configs. [Martin Raspaud] - Simplify version management. [Martin Raspaud] - Make boundary a property for lazy computation. [Martin Raspaud] - Create uptime on the fly if not provided. [Martin Raspaud] - Add avoid_list feature. [Martin Raspaud] - Fix ftp retrieval of aqua downlink schedule. [Martin Raspaud] Ftplib raises an error_perm sometime. It is now catched and handled correctly. - Take into account the 'start' conf parameter. [Martin Raspaud] The 'start' config parameters aims at skipping the first passes of the schedule in order to avoid changing the next scheduled pass. It was unfortunately not being used at all. This patch fixes the code to the right behaviour. - More debug info. [Martin Raspaud] - Don't put whitespaces in plot filenames. [Martin Raspaud] - Bugfixes and cleanup. [Martin Raspaud] - Bugfix the bugfix. [Martin Raspaud] - Merge pull request #3 from mraspaud/revert-2-develop. 
[Martin Raspaud] Revert "Change instrument from avhrr to avhrr/3" - Revert "Change instrument from avhrr to avhrr/3" [Martin Raspaud] - Merge pull request #2 from pnuu/develop. [Martin Raspaud] Change instrument from avhrr to avhrr/3 - Change instrument from avhrr to avhrr/3. [Panu Lahtinen] - Merge pull request #1 from pnuu/simplified_platforms. [Martin Raspaud] Removed platform name to TLE translation - Removed platform name to TLE translation. [Panu Lahtinen] - Fix the case when last vertex of intersection was last vertex of polygon. [Martin Raspaud] - Add setup.cfg for easy rpm generation. [Martin Raspaud] - More spherical tests. [Martin Raspaud] - Append tests to the test suite. [Martin Raspaud] - Add a few test to spherical geometry. [Martin Raspaud] - Add lons and lats to boundary init arguments. [Martin Raspaud] - A None intersection now returns an area of 0. [Martin Raspaud] - Update unittests to reflect structure changes. [Martin Raspaud] - Put an example cfg in the base directory. [Martin Raspaud] - Reorganizing. [Martin Raspaud] - Shorter, more effective filenames for plots. [Martin Raspaud] - Bugfix default xml location. [Martin Raspaud] - Bugfix report function. [Martin Raspaud] - Add reference area in plots. [Martin Raspaud] - Add xml declarations for report mode. [Martin Raspaud] - Add xml report mode. [Martin Raspaud] - Make the graph option an input directory. [Martin Raspaud] - Add option to generate pass plots. [Martin Raspaud] v0.2.2 (2014-06-02) ------------------- - Bump up version number. [Martin Raspaud] - Sort passes to avoid conflicts. [Martin Raspaud] - Add export method to graph. [Martin Raspaud] - Fix backward compatibility issue with numpy. [Martin Raspaud] - Refactorize, putting passes stuff in separate module. [Martin Raspaud] v0.2.1 (2014-05-27) ------------------- Fix ~~~ - Bugfix: wrong sorting of passes leaded to conflicting schedules. [Martin Raspaud] Other ~~~~~ - Bump up version number. 
[Martin Raspaud] - Make compare callable (as compare_scheds) [Martin Raspaud] - Add the confirmation option to the compare script. [Martin Raspaud] - Cleaning up. [Martin Raspaud] - Add pykdtree to travis dependencies. [Martin Raspaud] v0.2.0 (2014-05-20) ------------------- - Bump up version number. [Martin Raspaud] - Add option to compare the most recent requests to a confirmation. [Martin Raspaud] - Save xml data to temporary file first. [Martin Raspaud] - Refine station list. [Martin Raspaud] - Add request/confirmation comparison. [Martin Raspaud] - Remove dependency to scipy, and cleanup. [Martin Raspaud] - Start the schedule a little before to make sure we don't start in the middle of a conflict. [Martin Raspaud] - Added the glob dependency. [Martin Raspaud] - If ftp can't be reached for aqua dumps, use cached data. [Martin Raspaud] - Fix ftp export of xml file. [Martin Raspaud] - Fix xml file ftp push. [Martin Raspaud] - Add mail option to send errors by mail. [Martin Raspaud] - Smallest passes allowed are 4 minutes long. [Martin Raspaud] - Fix spherical intersection search. [Martin Raspaud] - Run on euron1. [Martin Raspaud] - Fix bug on intersection, where start of arc was the intersection. [Martin Raspaud] - Added Bochum station. [Martin Raspaud] - Added possibility to upload xmlfile to ftp. [Martin Raspaud] - Add downloading of aqua dump times. [Martin Raspaud] - Fix xml generation call. [Martin Raspaud] - Add a few options in the config file. [Martin Raspaud] - Use xml instead of lxml in the main xml generation function. [Martin Raspaud] - Bugfix in installation requirements. [Martin Raspaud] - Remove mpop from dependencies. [Martin Raspaud] - Adding docs. [Martin Raspaud] - Add atlas installation on travis. [Martin Raspaud] - Added missing dependencies. [Martin Raspaud] - Fixing travis. [Martin Raspaud] - Renamed a few things to avoid -_ problems. [Martin Raspaud] - Initial commit. [Martin Raspaud] - Initial commit. 
[Martin Raspaud] pytroll-schedule-0.6.0/codecov.yml000066400000000000000000000000001415440646100171540ustar00rootroot00000000000000pytroll-schedule-0.6.0/docs/000077500000000000000000000000001415440646100157515ustar00rootroot00000000000000pytroll-schedule-0.6.0/docs/.gitignore000066400000000000000000000000111415440646100177310ustar00rootroot00000000000000/_build/ pytroll-schedule-0.6.0/docs/Makefile000066400000000000000000000110261415440646100174110ustar00rootroot00000000000000# Makefile for Sphinx documentation # # You can set these variables from the command line. SPHINXOPTS = SPHINXBUILD = sphinx-build PAPER = BUILDDIR = _build # Internal variables. PAPEROPT_a4 = -D latex_paper_size=a4 PAPEROPT_letter = -D latex_paper_size=letter ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . .PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest help: @echo "Please use \`make ' where is one of" @echo " html to make standalone HTML files" @echo " dirhtml to make HTML files named index.html in directories" @echo " singlehtml to make a single large HTML file" @echo " pickle to make pickle files" @echo " json to make JSON files" @echo " htmlhelp to make HTML files and a HTML help project" @echo " qthelp to make HTML files and a qthelp project" @echo " devhelp to make HTML files and a Devhelp project" @echo " epub to make an epub" @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" @echo " latexpdf to make LaTeX files and run them through pdflatex" @echo " text to make text files" @echo " man to make manual pages" @echo " changes to make an overview of all changed/added/deprecated items" @echo " linkcheck to check all external links for integrity" @echo " doctest to run all doctests embedded in the documentation (if enabled)" clean: -rm -rf $(BUILDDIR)/* html: $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html @echo @echo "Build finished. 
The HTML pages are in $(BUILDDIR)/html." dirhtml: $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml @echo @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." singlehtml: $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml @echo @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." pickle: $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle @echo @echo "Build finished; now you can process the pickle files." json: $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json @echo @echo "Build finished; now you can process the JSON files." htmlhelp: $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp @echo @echo "Build finished; now you can run HTML Help Workshop with the" \ ".hhp project file in $(BUILDDIR)/htmlhelp." qthelp: $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp @echo @echo "Build finished; now you can run "qcollectiongenerator" with the" \ ".qhcp project file in $(BUILDDIR)/qthelp, like this:" @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/pytroll-schedule.qhcp" @echo "To view the help file:" @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/pytroll-schedule.qhc" devhelp: $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp @echo @echo "Build finished." @echo "To view the help file:" @echo "# mkdir -p $$HOME/.local/share/devhelp/pytroll-schedule" @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/pytroll-schedule" @echo "# devhelp" epub: $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub @echo @echo "Build finished. The epub file is in $(BUILDDIR)/epub." latex: $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex @echo @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." @echo "Run \`make' in that directory to run these through (pdf)latex" \ "(use \`make latexpdf' here to do that automatically)." latexpdf: $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex @echo "Running LaTeX files through pdflatex..." 
make -C $(BUILDDIR)/latex all-pdf @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." text: $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text @echo @echo "Build finished. The text files are in $(BUILDDIR)/text." man: $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man @echo @echo "Build finished. The manual pages are in $(BUILDDIR)/man." changes: $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes @echo @echo "The overview file is in $(BUILDDIR)/changes." linkcheck: $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck @echo @echo "Link check complete; look for any errors in the above output " \ "or in $(BUILDDIR)/linkcheck/output.txt." doctest: $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest @echo "Testing of doctests in the sources finished, look at the " \ "results in $(BUILDDIR)/doctest/output.txt." pytroll-schedule-0.6.0/docs/about.rst000066400000000000000000000076221415440646100176240ustar00rootroot00000000000000.. about_ About PyTroll-Schedule ====================== Case of One Receiving Station ----------------------------- In the case of a single station, the procedure of scheduling is quite straightforward. However, let us describe it in detail here, such that the background will be set for the more complex case of multiple reception station reception scheduling. The first step to compute the schedule, is to know which satellites of interest are going to be rising above the horizon during the duration of the schedule. In order to find such cases, we retrieve the orbital information for each satellite of interest and apply orbit prediction using the aiaa sgp4 algorithm (ref). In practice, we use norad tle files (ref) as orbital elements, and the python implementation of the sgp4 algorithm provided in pyorbital (ref). From this, we then obtain a list of the coming overpasses for the station. 
We define an overpass as a risetime and fall time for a given satellite, during which it will be within reception reach of the station. Now, we have to find the possible schedules for the station. The set of all overpasses gives us all the reception possibilities for the station. However, many of them will be in conflict with at least one other overpass and will be a concurrent to the reception race. We say that two overpasses conflict when the risetime dog one of them is comprised within the view time of the second. In case of conflicts, the scheduling algorithm has to choose one or the other overpass. However, in the case of several overpasses conflicting sequentially, we have to find the possible paths through the conflicting zone. In order to do that, we will use graph theory algorithms. We define the graph of the conflicting zone with overpasses as vertices and create an edge between two conflicting overpasses. To find the possible non-conflicting combinations in this graph is actually searching for maximal cliques in the complementary graph, for which we use the Bron-Kerbosch algorithm. #illustration click we obtain thus groups of passes that are not conflicting in the time frame. The next step is to find the optimal list of non conflicting passes under the duration on the schedule. Cases of Connected Stations --------------------------- There are several ways to computate schedules for connected stations, two are implemented in this program. Several points should be considered: * Technical equipement, reception of L-band, Ku-band, X-band? * Geographic location, nearby or large distance between? "Master-Slave" Operation ************************ The mode of master-slave is best suited for two stations, located next to each other, with similar technical systems. In this case a schedule for one, namely the "master" station, would be computed, as if it were only this one station. 
In a second step this schedule plan is used as a substraction list when computing the schedule for the second, the "slave" station. Co-operating Stations ********************* A mode of co-operating stations can consider the distance between different geographical locations and differences in technical equipement, most notable different reception capabilities (X- & L-band vs. L-band). In this case, each station defines a time span requirement for each pass. Then, if a connected station can fulfil this requirement and is scheduling the same pass, we can say that the stations are redundant. To avoid such redundancy, we can define ways to synchronise the schedule to optimise the intake of data and fulfil the pareto condition. A simple protocol can be used to perform this: both A and B provide alternatives and compute the enhanced score for the schedule including the others pass. B can delegate the pass only if it can assure that the time span requirement of A is respected. This operation can be extended to more than two stations, all receiving a single-operation schedule and an individual cooperating-schedule. pytroll-schedule-0.6.0/docs/conf.py000066400000000000000000000161231415440646100172530ustar00rootroot00000000000000# -*- coding: utf-8 -*- # # pytroll-schedule documentation build configuration file, created by # sphinx-quickstart on Mon Feb 24 23:43:03 2014. # # This file is execfile()d with the current directory set to its containing dir. # # Note that not all possible configuration values are present in this # autogenerated file. # # All configuration values have a default; values that are commented out # serve to show the default. import sys, os # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. 
#sys.path.insert(0, os.path.abspath('.')) # -- General configuration ----------------------------------------------------- # If your documentation needs a minimal Sphinx version, state it here. #needs_sphinx = '1.0' # Add any Sphinx extension module names here, as strings. They can be extensions # coming with Sphinx (named 'sphinx.ext.*') or your custom ones. extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx', 'sphinx.ext.pngmath'] # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] # The suffix of source filenames. source_suffix = '.rst' # The encoding of source files. #source_encoding = 'utf-8-sig' # The master toctree document. master_doc = 'index' # General information about the project. project = u'pytroll-schedule' copyright = u'2014, Martin Raspaud' # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. # # The short X.Y version. version = '0.1.0' # The full version, including alpha/beta/rc tags. release = '0.1.0' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. #language = None # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: #today = '' # Else, today_fmt is used as the format for a strftime call. #today_fmt = '%B %d, %Y' # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. exclude_patterns = ['_build'] # The reST default role (used for this markup: `text`) to use for all documents. #default_role = None # If true, '()' will be appended to :func: etc. cross-reference text. #add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). 
#add_module_names = True # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. #show_authors = False # The name of the Pygments (syntax highlighting) style to use. pygments_style = 'sphinx' # A list of ignored prefixes for module index sorting. #modindex_common_prefix = [] # -- Options for HTML output --------------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. html_theme = 'default' # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. #html_theme_options = {} # Add any paths that contain custom themes here, relative to this directory. #html_theme_path = [] # The name for this set of Sphinx documents. If None, it defaults to # " v documentation". #html_title = None # A shorter title for the navigation bar. Default is the same as html_title. #html_short_title = None # The name of an image file (relative to this directory) to place at the top # of the sidebar. #html_logo = None # The name of an image file (within the static path) to use as favicon of the # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 # pixels large. #html_favicon = None # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". html_static_path = ['_static'] # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, # using the given strftime format. #html_last_updated_fmt = '%b %d, %Y' # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. #html_use_smartypants = True # Custom sidebar templates, maps document names to template names. 
#html_sidebars = {} # Additional templates that should be rendered to pages, maps page names to # template names. #html_additional_pages = {} # If false, no module index is generated. #html_domain_indices = True # If false, no index is generated. #html_use_index = True # If true, the index is split into individual pages for each letter. #html_split_index = False # If true, links to the reST sources are added to the pages. #html_show_sourcelink = True # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. #html_show_sphinx = True # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. #html_show_copyright = True # If true, an OpenSearch description file will be output, and all pages will # contain a tag referring to it. The value of this option must be the # base URL from which the finished HTML is served. #html_use_opensearch = '' # This is the file name suffix for HTML files (e.g. ".xhtml"). #html_file_suffix = None # Output file base name for HTML help builder. htmlhelp_basename = 'pytroll-scheduledoc' # -- Options for LaTeX output -------------------------------------------------- # The paper size ('letter' or 'a4'). #latex_paper_size = 'letter' # The font size ('10pt', '11pt' or '12pt'). #latex_font_size = '10pt' # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, author, documentclass [howto/manual]). latex_documents = [ ('index', 'pytroll-schedule.tex', u'pytroll-schedule Documentation', u'Martin Raspaud', 'manual'), ] # The name of an image file (relative to this directory) to place at the top of # the title page. #latex_logo = None # For "manual" documents, if this is true, then toplevel headings are parts, # not chapters. #latex_use_parts = False # If true, show page references after internal links. #latex_show_pagerefs = False # If true, show URL addresses after external links. #latex_show_urls = False # Additional stuff for the LaTeX preamble. 
#latex_preamble = '' # Documents to append as an appendix to all manuals. #latex_appendices = [] # If false, no module index is generated. #latex_domain_indices = True # -- Options for manual page output -------------------------------------------- # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). man_pages = [ ('index', 'pytroll-schedule', u'pytroll-schedule Documentation', [u'Martin Raspaud'], 1) ] # Example configuration for intersphinx: refer to the Python standard library. intersphinx_mapping = {'http://docs.python.org/': None} pytroll-schedule-0.6.0/docs/config.rst000066400000000000000000000123751415440646100177600ustar00rootroot00000000000000Configuration ============= Two formats are recognized for the configuration files -- the plain "group/key/value" format (parsed by module ConfigReader) and the now prefered YAML-style. .. note:: .. deprecated:: 0.3.4 The plain config format (.cfg) is deprecated, all config files should be changed to the YAML style. Although here the YAML style is described, it also fits the plain config style. The configuration file is divided in several section, each titled with a name in square brackets. All sections are filled with key:value pairs. Required sub-sections are those in sections ``default`` and ``pattern``, all others are referenced by the station list in section ``default`` and the satellites lists in the sub-sections per station. Main section "default" ------------------------ :: default: center_id: DWD-OF station: - nrk - ofb forward: 12 start: 0.5 min_pass: 4 dump_url: "ftp://is.sci.gsfc.nasa.gov/ancillary/ephemeris/schedule/%s/downlink/" ``center_id`` Name/ID for centre/org creating schedules. ``station`` IDs of the stations to make a schedule for. ``forward`` The timespan in hours the schedule should cover. ``start`` Time offset between the time of computing and start of the schedule. ``min_pass`` Minimum satellite pass duration (in minutes). 
``dump_url`` FTP URL, where to retrieve information about the data dump for AQUA and TERRA. File- and directory pattern --------------------------- Each of the keys in this section can be referenced from within other lines in this section. .. note:: Be carefull not to create loops! :: pattern: dir_output: {output_dir}/{date}-{time} dir_plots: {dir_output}/plots.{station} file_xml: {dir_output}/aquisition-schedule-{mode}_{station}.xml file_sci: {dir_output}/scisys-schedule-{station}.txt file_graph: {dir_output}/graph.{station} ``time``, ``date`` Time+date, when the computation started. ``station`` Replaced with the station name. For co-operating stations "combined schedule" the string ".comb" is appended. ``mode`` If a schedule file was created in `request` mode or in `report` mode. The first one is the format accepted by SciSYS software, while `report` mode is good for monitoring purposes. ``output_dir`` This placeholder references the command-line argument ``-o OUTPUT_DIR``. ``dir_output`` This key is used only within this section. ``dir_plots`` Where the plots (globe grafic files) are saved. ``file_xml`` Path and filename format for xml-schedule (request and report). ``file_sci`` Path and filename for schedule file in SciSYS format. ``file_graph`` Graph files with information about the computation. Stations -------- :: stations: ofb: name: "DWD Offenbach" longitude: 8.747073 latitude: 50.103095 altitude: 0.140 area_file: /home/troll/etc/areas.def area: euro4 satellites: metop-a: night: 0.1 day: 0.1 metop-b: night: 0.1 day: 0.1 noaa 19: noaa 18: noaa 15: aqua: terra: suomi npp: ``name`` Name of the station. ``longitude`` Longitude in degrees east. ``latitude`` Longitude in degrees north. ``altitude`` Altitude above mean sea level, in km. ``area_file``, ``area`` File with area definitions, and the area referenced therein. This area is taken into computation, only satellite passes which swaths are cross-sectioning this area are considered for scheduling. 
``satellites`` Satellites receivable from this station. The listed names may refer to the satellite sections. If the satellites are listed as a key/value dictionary, the day/night weight values (scores) can be set per satellite as in the section ``satellites``, described below. If no sub-keys are given, the scores from the section ``satellites`` are used. Alternativly the satellites can be listed as a list, as shown in the following example for station "nrk". In this case all names refer to the section ``satellites``. :: stations: nrk: name: Norrkoeping longitude: 16.148649 latitude: 58.581844 altitude: 0.052765 area_file: /home/troll/etc/areas.def area: euron1 satellites: - metop-a - metop-b - noaa 19 - noaa 18 - noaa 15 - aqua - terra - suomi npp While the above example sub-section contained values for the station Offenbach/Germany, this sub-section has values for Norkoepping/Sweden. Satellites ---------- :: satellites: metop-a: night:0.1 day:0.6 noaa 19: night:0.05 day:0.3 terra: night:0.2 day:0.8 suomi npp: night:0.25 day:0.9 A few examples for satellite sections, assigning scores to each satellite. ``night`` Weight value for satellite swath parts on the night-side of the terminator. ``day`` Weight value for satellite swath parts on the day-side of the terminator. pytroll-schedule-0.6.0/docs/example.rst000066400000000000000000000022161415440646100201370ustar00rootroot00000000000000Example ======= This Bash-script shows an example how to use the schedule script:: #!/usr/bin/bash . 
$HOME/.bash_profile bin=$HOME/.local/bin/schedule conf=$PYTROLL_BASE/etc-schedule logp=$PYTROLL_BASE/log/scheduler.log logc=$PYTROLL_BASE/log/create.log odir=$PYTROLL_BASE/schedules cfgfile=$1 shift # report-mode with plots mode="-r -p" # min time-diff btwn passes delay="90" # create output for scisys sci="--scisys" # dont include aqua-dumps aqua="--no-aqua-dump" # write gv-files for dot graph="-g" # exit if no new tle (( `ls $PYTROLL_BASE/tle/*.tle 2>/dev/null|wc -l` > 0 )) || exit 0 # catch newest TLE-file, remove others tle=`ls -t $PYTROLL_BASE/tle/*.tle|head -1` mv $tle $tle.txt rm -f $PYTROLL_BASE/tle/*.tle # settings for TLE-check satlst="NOAA 15|NOAA 18|NOAA 19|METOP-A|METOP-B|AQUA|SUOMI|TERRA " satcnt=8 to_addr=pytroll@schedule # check if TLE-file is complete if (( `grep -P "$satlst" $tle.txt|tee .tle_grep|wc -l` != $satcnt )); then exit 0 else tle="-t $tle.txt" fi # start schedule script $bin -v -l $logp -c $conf/$cfgfile $tle -d $delay -o $odir $graph $mode $sci $aqua $@ pytroll-schedule-0.6.0/docs/index.rst000066400000000000000000000011271415440646100176130ustar00rootroot00000000000000.. pytroll-schedule documentation master file, created by sphinx-quickstart on Mon Feb 24 23:43:03 2014. You can adapt this file completely to your liking, but it should at least contain the root `toctree` directive. Welcome to pytroll-schedule's documentation! ============================================ Reception scheduling of polar weather satellites. Contents: .. toctree:: :maxdepth: 2 about miscellaneous .. 
toctree:: :maxdepth: 1 usage config example Indices and tables ================== * :ref:`genindex` * :ref:`modindex` * :ref:`search` pytroll-schedule-0.6.0/docs/install.rst000066400000000000000000000024431415440646100201540ustar00rootroot00000000000000Installation ============ mkdir $HOME/PyTROLL cd !$ mkdir data-in data-out etc #dnf install pyshp python-configobj numpy numpy-f2py scipy python-numexpr \ #python-pillow proj proj-epsg proj-nad pyproj python2-matplotlib python-basemap # wget https://github.com/pytroll/mipp/archive/master.zip -O mipp-master.zip wget https://github.com/pytroll/mpop/archive/pre-master.zip -O mpop-pre-master.zip wget https://github.com/pytroll/pyresample/archive/master.zip -O pyresample-master.zip wget https://github.com/adybbroe/python-geotiepoints/archive/master.zip -O python-geotiepoints-master.zip wget https://github.com/pytroll/pycoast/archive/master.zip -O pycoast-master.zip wget https://github.com/pytroll/pyorbital/archive/master.zip -O pyorbital-master.zip wget https://github.com/pytroll/trollcast/archive/master.zip -O trollcast-master.zip wget https://github.com/pytroll/pytroll-schedule/archive/master.zip -O pytroll-schedule-master.zip wget https://github.com/pytroll/trollduction/archive/master.zip -O trollduction-master.zip for ff in *.zip ; do unzip -u $ff cd $(basename $ff .zip) python setup.py build python setup.py install --user done cat < $HOME/.pytroll.rc pytroll-schedule-0.6.0/docs/make.bat000066400000000000000000000106631415440646100173640ustar00rootroot00000000000000@ECHO OFF REM Command file for Sphinx documentation if "%SPHINXBUILD%" == "" ( set SPHINXBUILD=sphinx-build ) set BUILDDIR=_build set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% . if NOT "%PAPER%" == "" ( set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS% ) if "%1" == "" goto help if "%1" == "help" ( :help echo.Please use `make ^` where ^ is one of echo. html to make standalone HTML files echo. 
dirhtml to make HTML files named index.html in directories echo. singlehtml to make a single large HTML file echo. pickle to make pickle files echo. json to make JSON files echo. htmlhelp to make HTML files and a HTML help project echo. qthelp to make HTML files and a qthelp project echo. devhelp to make HTML files and a Devhelp project echo. epub to make an epub echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter echo. text to make text files echo. man to make manual pages echo. changes to make an overview over all changed/added/deprecated items echo. linkcheck to check all external links for integrity echo. doctest to run all doctests embedded in the documentation if enabled goto end ) if "%1" == "clean" ( for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i del /q /s %BUILDDIR%\* goto end ) if "%1" == "html" ( %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html if errorlevel 1 exit /b 1 echo. echo.Build finished. The HTML pages are in %BUILDDIR%/html. goto end ) if "%1" == "dirhtml" ( %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml if errorlevel 1 exit /b 1 echo. echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml. goto end ) if "%1" == "singlehtml" ( %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml if errorlevel 1 exit /b 1 echo. echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml. goto end ) if "%1" == "pickle" ( %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle if errorlevel 1 exit /b 1 echo. echo.Build finished; now you can process the pickle files. goto end ) if "%1" == "json" ( %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json if errorlevel 1 exit /b 1 echo. echo.Build finished; now you can process the JSON files. goto end ) if "%1" == "htmlhelp" ( %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp if errorlevel 1 exit /b 1 echo. echo.Build finished; now you can run HTML Help Workshop with the ^ .hhp project file in %BUILDDIR%/htmlhelp. 
goto end ) if "%1" == "qthelp" ( %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp if errorlevel 1 exit /b 1 echo. echo.Build finished; now you can run "qcollectiongenerator" with the ^ .qhcp project file in %BUILDDIR%/qthelp, like this: echo.^> qcollectiongenerator %BUILDDIR%\qthelp\pytroll-schedule.qhcp echo.To view the help file: echo.^> assistant -collectionFile %BUILDDIR%\qthelp\pytroll-schedule.ghc goto end ) if "%1" == "devhelp" ( %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp if errorlevel 1 exit /b 1 echo. echo.Build finished. goto end ) if "%1" == "epub" ( %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub if errorlevel 1 exit /b 1 echo. echo.Build finished. The epub file is in %BUILDDIR%/epub. goto end ) if "%1" == "latex" ( %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex if errorlevel 1 exit /b 1 echo. echo.Build finished; the LaTeX files are in %BUILDDIR%/latex. goto end ) if "%1" == "text" ( %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text if errorlevel 1 exit /b 1 echo. echo.Build finished. The text files are in %BUILDDIR%/text. goto end ) if "%1" == "man" ( %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man if errorlevel 1 exit /b 1 echo. echo.Build finished. The manual pages are in %BUILDDIR%/man. goto end ) if "%1" == "changes" ( %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes if errorlevel 1 exit /b 1 echo. echo.The overview file is in %BUILDDIR%/changes. goto end ) if "%1" == "linkcheck" ( %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck if errorlevel 1 exit /b 1 echo. echo.Link check complete; look for any errors in the above output ^ or in %BUILDDIR%/linkcheck/output.txt. goto end ) if "%1" == "doctest" ( %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest if errorlevel 1 exit /b 1 echo. echo.Testing of doctests in the sources finished, look at the ^ results in %BUILDDIR%/doctest/output.txt. 
goto end ) :end pytroll-schedule-0.6.0/docs/miscellaneous.rst000066400000000000000000000043041415440646100213470ustar00rootroot00000000000000Accounting for specific DR restrictions ======================================= For some satellites and some instruments direct readout is not available constantly. This is the case for instance for MODIS onboard the EOS Terra and Aqua satellites. Aqua and Terra dumps -------------------- During the time when the EOS satellites make their global dump there is no Direct Broadcast available for Direct Readout users. In order to disregard the scheduling of local reception of data which will not be available due to these restrictions the NASA Direct Readout Lab publish Terra and Aqua Direct Broadcast Downlink Scheduling Information regularly. The Pytroll-schedule take these reports into account when deriving the local schedule. The ftp adressses are: - ftp://is.sci.gsfc.nasa.gov/ancillary/ephemeris/schedule/aqua/downlink/ - ftp://is.sci.gsfc.nasa.gov/ancillary/ephemeris/schedule/terra/downlink/ .. Earth Observing System Polar Ground Network (EPGN) EPGN ground stations: Alaska Ground Station (AGS), Poker Flat, the Svalbard Satellite Station (SGS), the Kongsberg–Lockheed Martin ground station (SKS), and the SvalSat ground station (SG3) in Norway, as well as the SSC North Pole facility. SSC's North Pole Facility hosting the two antennas USAK04 & USAK05 (part of EPGN) Metop ----- Due to a broken A-side of the Metop-A HRPT downlink module only the B-side is operable on the Metop-A spacecraft. The A-side failure was due to a malfunctioning transistor that proved to be sensitive to cosmic radiation. The same transistor is available on the B-side and therefore EUMETSAT has implemented a restricted operation of the B-side HRPT downlink module. This means no HRPT direct broadcast is available during parts of the Metop-A orbit (e.g. over the North Pole and around the South Atlantic Anamoly). 
For direct readout users EUMETSAT regularly (several times per day) publish an HRPT on/off schedule in XML format at the following URL: - http://oiswww.eumetsat.org/uns/webapps/index.html?type=17 This will be read by Pytroll-schedule in order to disregard Metop-A where there will be no Direct Readout available over the relavnt local reception station. NOT YET IMPLEMENTED! .. FIXME! pytroll-schedule-0.6.0/docs/usage.rst000066400000000000000000000042671415440646100176200ustar00rootroot00000000000000Usage ===== Usage of the schedule script:: usage: schedule [-h] [-c CONFIG] [-t TLE] [-l LOG] [-m [MAIL [MAIL ...]]] [-v] [--lat LAT] [--lon LON] [--alt ALT] [-f FORWARD] [-s START_TIME] [-d DELAY] [-a AVOID] [--no-aqua-terra-dump] [--multiproc] [-o OUTPUT_DIR] [-u OUTPUT_URL] [-x] [-r] [--scisys] [-p] [-g] optional arguments: -h, --help show this help message and exit -c CONFIG, --config CONFIG configuration file to use -t TLE, --tle TLE tle file to use -l LOG, --log LOG File to log to (defaults to stdout) -m [MAIL [MAIL ...]], --mail [MAIL [MAIL ...]] mail address(es) to send error messages to. 
-v, --verbose print debug messages too start-parameter: (or set values in the configuration file) --lat LAT Latitude, degrees north --lon LON Longitude, degrees east --alt ALT Altitude, km -f FORWARD, --forward FORWARD time ahead to compute the schedule -s START_TIME, --start-time START_TIME start time of the schedule to compute -d DELAY, --delay DELAY delay (in seconds) needed between two consecutive passes (60 seconds by default) special: (additional parameter changing behaviour) -a AVOID, --avoid AVOID xml request file with passes to avoid --no-aqua-terra-dump do not consider Aqua/Terra-dumps --multiproc use multiple parallel processes output: (file pattern are taken from configuration file) -o OUTPUT_DIR, --output-dir OUTPUT_DIR where to put generated files -u OUTPUT_URL, --output-url OUTPUT_URL URL where to put generated schedule file(s), otherwise use output-dir -x, --xml generate an xml request file (schedule) -r, --report generate an xml report file (schedule) --scisys generate a SCISYS schedule file -p, --plot generate plot images -g, --graph save graph info pytroll-schedule-0.6.0/generate_schedule_xmlpage.py000066400000000000000000000144441415440646100225650ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2016, 2018, 2019 Adam.Dybbroe # Author(s): # Adam.Dybbroe # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # You should have received a copy of the GNU General Public License # along with this program. If not, see . 
"""From a schedule request xml file generate png's with swath coverage outline and an xml page for visualisation. It uses posttroll to listen for incoming schedule request xml files and then triggers the png and xml output generation. """ import logging import sys import os from six.moves.configparser import RawConfigParser from six.moves.urllib.parse import urlparse import posttroll.subscriber from posttroll.publisher import Publish import xml.etree.ElementTree as ET from datetime import datetime import os.path from trollsched.satpass import Pass from trollsched.drawing import save_fig from trollsched import (SATELLITE_NAMES, INSTRUMENT) LOG = logging.getLogger(__name__) CFG_DIR = os.environ.get('PYTROLL_SCHEDULE_CONFIG_DIR', './') CONF = RawConfigParser() CFG_FILE = os.path.join(CFG_DIR, "pytroll_schedule_config.cfg") LOG.debug("Config file = " + str(CFG_FILE)) if not os.path.exists(CFG_FILE): raise IOError('Config file %s does not exist!' % CFG_FILE) CONF.read(CFG_FILE) OPTIONS = {} for option, value in CONF.items("DEFAULT"): OPTIONS[option] = value #: Default time format _DEFAULT_TIME_FORMAT = '%Y-%m-%d %H:%M:%S' #: Default log format _DEFAULT_LOG_FORMAT = '[%(levelname)s: %(asctime)s : %(name)s] %(message)s' def process_xmlrequest(filename, plotdir, output_file, excluded_satellites): tree = ET.parse(filename) root = tree.getroot() for child in root: if child.tag == 'pass': LOG.debug("Pass: %s", str(child.attrib)) platform_name = SATELLITE_NAMES.get(child.attrib['satellite'], child.attrib['satellite']) instrument = INSTRUMENT.get(platform_name) if not instrument: LOG.error('Instrument unknown! 
Platform = %s', platform_name) continue if platform_name in excluded_satellites: LOG.debug('Platform name excluded: %s', platform_name) continue try: overpass = Pass(platform_name, datetime.strptime(child.attrib['start-time'], '%Y-%m-%d-%H:%M:%S'), datetime.strptime(child.attrib['end-time'], '%Y-%m-%d-%H:%M:%S'), instrument=instrument) except KeyError as err: LOG.warning('Failed on satellite %s: %s', platform_name, str(err)) continue save_fig(overpass, directory=plotdir) child.set('img', overpass.fig) child.set('rec', 'True') LOG.debug("Plot saved - plotdir = %s, platform_name = %s", plotdir, platform_name) tree.write(output_file, encoding='utf-8', xml_declaration=True) with open(output_file) as fpt: lines = fpt.readlines() lines.insert( 1, "") with open(output_file, 'w') as fpt: fpt.writelines(lines) def start_plotting(jobreg, message, **kwargs): """Read the xmlschedule request file and make the png images of swath outlines and generate the output xml file for web publication """ excluded_satellites = kwargs.get('excluded_satellites', []) LOG.info("") LOG.info("job-registry dict: " + str(jobreg)) LOG.info("\tMessage:") LOG.info(message) urlobj = urlparse(message.data['uri']) # path, fname = os.path.split(urlobj.path) process_xmlrequest(urlobj.path, OPTIONS['path_plots'], OPTIONS['xmlfilepath'], excluded_satellites) return jobreg def schedule_page_generator(excluded_satellite_list=None): """Listens and triggers processing""" LOG.info( "*** Start the generation of the schedule xml page with swath outline plots") with posttroll.subscriber.Subscribe('', [OPTIONS['posttroll_topic'], ], True) as subscr: with Publish('schedule_page_generator', 0) as publisher: job_registry = {} for msg in subscr.recv(): job_registry = start_plotting( job_registry, msg, publisher=publisher, excluded_satellites=excluded_satellite_list) # Cleanup in registry (keep only the last 5): keys = job_registry.keys() if len(keys) > 5: keys.sort() job_registry.pop(keys[0]) if __name__ == "__main__": 
import argparse parser = argparse.ArgumentParser() parser.add_argument("-x", "--excluded_satellites", nargs='*', help="List of platform names to exclude", default=[]) opts = parser.parse_args() no_sats = opts.excluded_satellites handler = logging.StreamHandler(sys.stderr) handler.setLevel(logging.DEBUG) formatter = logging.Formatter(fmt=_DEFAULT_LOG_FORMAT, datefmt=_DEFAULT_TIME_FORMAT) handler.setFormatter(formatter) logging.getLogger('').addHandler(handler) logging.getLogger('').setLevel(logging.DEBUG) logging.getLogger('posttroll').setLevel(logging.INFO) LOG = logging.getLogger('schedule_page_generator') LOG.info("Exclude the following satellite platforms: %s", str(no_sats)) schedule_page_generator(no_sats) # uri = "/data/temp/AdamD/xxx/2018-10-22-00-42-28-acquisition-schedule-confirmation-nrk.xml" # urlobj = urlparse(uri) # process_xmlrequest(urlobj.path, # OPTIONS['path_plots'], OPTIONS['xmlfilepath'], no_sats) pytroll-schedule-0.6.0/reqreader.xsl000066400000000000000000000072541415440646100175330ustar00rootroot00000000000000 Ăœberflugplan

Ăœberflugplan

Erstellt von: um:

Antenne:

Pass Satellit Datum Anfang Ende
(Click auf Ăœberflug zeigt Plot)
javascript:switcher('plots.') rec_true rec_false
pytroll-schedule-0.6.0/scheduler.cfg_template000066400000000000000000000031741415440646100213600ustar00rootroot00000000000000[default] station=nrk,bch #7 days forward=168 start=2 # dir/file pattern, follows rules of str.format() # pre-set names: # {date}, time}, # {station} (as in [station]->name, for combined schedules ".comb" is appended), # {mode} (request|report), # {output_dir} (as in cmd-param --output-dir) [pattern] dir_output= {output_dir}/{date}-{time} dir_plots= {dir_output}/plots.{station} file_xml= {dir_output}/aquisition-schedule-{mode}_{station}.xml file_sci= {dir_output}/scisys-schedule-{station}.txt file_graph= {dir_output}/graph [nrk] name=nrk # degrees east longitude=16.148649 # degrees north latitude=58.581844 # km altitude=0.052765 #area_file=/home/sat/smhi/scheduler/areas.def area_file=/local_disk/usr/src/satprod/etc/areas.def area=euron1 satellites=metop-a,metop-b,noaa 19,noaa 18,noaa 15,aqua,terra,suomi npp #satellites=noaa 16,terra [nrk-test] name=nrk # degrees east longitude=16.148649 # degrees north latitude=58.581844 # km altitude=0.052765 area_file=/home/sat/smhi/scheduler_test/areas.def area=euron1 satellites=metop-a,metop-b,noaa 19,noaa 18,noaa 15,aqua,terra,suomi npp #satellites=noaa 16,terra [bch] name=bch # degrees east longitude=7.314910 # degrees north latitude=51.505070 # km altitude=0.14 area_file=/local_disk/usr/src/satprod/etc/areas.def area=euron1 satellites=metop-a,metop-b,noaa 19,noaa 18,noaa 15,aqua,terra,suomi npp #satellites=noaa 16,terra [metop-a] night=0.1 day=0.6 [metop-b] night=0.1 day=0.6 [noaa 19] night=0.05 day=0.3 [noaa 18] night=0.1 day=0.6 [noaa 16] night=0.1 day=0.6 [noaa 15] night=0.05 day=0.3 [aqua] night=0.2 day=0.8 [terra] night=0.2 day=0.8 [suomi npp] night=0.25 day=0.9 pytroll-schedule-0.6.0/scheduler.yaml_template000066400000000000000000000055001415440646100215560ustar00rootroot00000000000000default: center_id: SMHI station: - nrk - bch # schedule starts in 2 hours and covers 7 days forward: 168 start: 
2 # minimum observable pass length [minutes] min_pass: 4 # URL for Aqua/Terra dump information dump_url: "ftp://is.sci.gsfc.nasa.gov/ancillary/ephemeris/schedule/%s/downlink/" pattern: # dir/file pattern, follows rules of str.format() # pre-set names: # {date}, {time} : date+time # {station} : as in station->name, for combined schedules ".comb" is appended, # {mode} : request|report, # {output_dir} : as in cmd-param --output-dir dir_output: "{output_dir}/{date}-{time}" dir_plots: "{dir_output}/plots.{station}" file_xml: "{dir_output}/aquisition-schedule-{mode}_{station}.xml" file_sci: "{dir_output}/scisys-schedule-{station}.txt" file_graph: "{dir_output}/graph" stations: nrk: name: nrk # degrees east longitude: 16.148649 # degrees north latitude: 58.581844 # km altitude: 0.052765 area_file: /home/amaul/PyTROLL/etc/areas.def area: euron1 satellites: - metop-a - metop-b - noaa 19 - noaa 18 - noaa 15 - aqua - terra - suomi npp nrk-test: name: nrk # degrees east longitude: 16.148649 # degrees north latitude: 58.581844 # km altitude: 0.052765 area_file: /home/amaul/PyTROLL/etc/areas.def area: euron1 satellites: metop-a : night: 0.1 day: 0.6 metop-b : night: 0.1 day: 0.6 noaa 19 : noaa 18 : noaa 15 : aqua : terra : suomi npp : bch: name: bch # degrees east longitude: 7.314910 # degrees north latitude: 51.505070 # km altitude: 0.14 area_file: /home/amaul/PyTROLL/etc/areas.def area: euron1 satellites: - metop-a - metop-b - noaa 19 - noaa 18 - noaa 15 - aqua - terra - suomi npp satellites: metop-a: night: 0.1 day: 0.6 metop-b: night: 0.1 day: 0.6 noaa 19: night: 0.05 day: 0.3 noaa 18: night: 0.1 day: 0.6 noaa 16: night: 0.1 day: 0.6 noaa 15: night: 0.05 day: 0.3 aqua: night: 0.2 day: 0.8 terra: night: 0.2 day: 0.8 suomi npp: night: 0.25 day: 0.9 pytroll-schedule-0.6.0/setup.cfg000066400000000000000000000007621415440646100166470ustar00rootroot00000000000000[bdist_rpm] requires=numpy pyresample pyorbital pyyaml release=1 # See the docstring in versioneer.py for instructions. 
Note that you must # re-run 'versioneer.py setup' after changing this section, and commit the # resulting files. [versioneer] VCS = git style = pep440 versionfile_source = trollsched/version.py versionfile_build = tag_prefix = v parentdir_prefix = [bdist_wheel] universal=1 [flake8] max-line-length = 120 [coverage:run] omit = trollsched/version.py versioneer.py pytroll-schedule-0.6.0/setup.py000066400000000000000000000041611415440646100165350ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2014 - 2019 PyTroll Community # Author(s): # Martin Raspaud # Adam Dybbroe # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # You should have received a copy of the GNU General Public License # along with this program. If not, see . 
""" """ # workaround python bug: http://bugs.python.org/issue15881#msg170215 import multiprocessing from setuptools import setup import sys import versioneer requires = ['numpy', 'pyresample', 'pyorbital', 'pyyaml'] test_requires = [] setup(name='pytroll-schedule', version=versioneer.get_version(), cmdclass=versioneer.get_cmdclass(), description='Scheduling satellite passes in Python', author='Martin Raspaud', author_email='martin.raspaud@smhi.se', classifiers=["Development Status :: 4 - Beta", "Intended Audience :: Science/Research", "License :: OSI Approved :: GNU General Public License v3 " + "or later (GPLv3+)", "Operating System :: OS Independent", "Programming Language :: Python", "Topic :: Scientific/Engineering", "Topic :: Scientific/Engineering :: Astronomy"], test_suite='trollsched.tests.suite', entry_points={ 'console_scripts': ['schedule = trollsched.schedule:run', 'compare_scheds = trollsched.compare:run']}, scripts=['generate_schedule_xmlpage.py'], packages=['trollsched'], tests_require=test_requires, install_requires=requires, zip_safe=False, ) pytroll-schedule-0.6.0/trollsched/000077500000000000000000000000001415440646100171645ustar00rootroot00000000000000pytroll-schedule-0.6.0/trollsched/__init__.py000066400000000000000000000041361415440646100213010ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2014 - 2019 PyTroll Community # Author(s): # Martin Raspaud # Adam Dybbroe # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. 
# You should have received a copy of the GNU General Public License # along with this program. If not, see . """Package file. """ from .version import get_versions __version__ = get_versions()['version'] del get_versions # shortest allowed pass in minutes MIN_PASS = 4 # DRL still use the name JPSS-1 in the TLEs: NOAA20_NAME = {'NOAA-20': 'JPSS-1'} NUMBER_OF_FOVS = { 'avhrr': 2048, 'mhs': 90, 'amsua': 30, 'mwhs2': 98, 'atms': 96, 'ascat': 42, 'viirs': 6400, 'atms': 96, 'mwhs-2': 98 } SATELLITE_NAMES = {'npp': 'Suomi NPP', 'noaa19': 'NOAA 19', 'noaa18': 'NOAA 18', 'noaa15': 'NOAA 15', 'aqua': 'Aqua', 'terra': 'Terra', 'metopc': 'Metop-C', 'metopb': 'Metop-B', 'metopa': 'Metop-A', 'noaa20': 'NOAA-20', 'fengyun3d': 'FY-3D', 'fengyun3c': 'FY-3C' } INSTRUMENT = {'Suomi NPP': 'viirs', 'NOAA-20': 'viirs', 'Aqua': 'modis', 'Terra': 'modis', 'NOAA 19': 'avhrr', 'NOAA 18': 'avhrr', 'NOAA 15': 'avhrr', 'Metop-A': 'avhrr', 'Metop-B': 'avhrr', 'Metop-C': 'avhrr', 'FY-3D': 'avhrr', 'FY-3C': 'avhrr'} pytroll-schedule-0.6.0/trollsched/boundary.py000066400000000000000000000222721415440646100213660ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2014-2019 PyTroll community # Author(s): # Martin Raspaud # Adam Dybbroe # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # You should have received a copy of the GNU General Public License # along with this program. If not, see . """The Boundary classes. 
""" import logging import logging.handlers import numpy as np from pyresample.boundary import Boundary from pyorbital import geoloc, geoloc_instrument_definitions logger = logging.getLogger(__name__) INSTRUMENT = {'avhrr/3': 'avhrr', 'avhrr/2': 'avhrr', 'avhrr-3': 'avhrr', 'mwhs-2': 'mwhs2'} class SwathBoundary(Boundary): """Boundaries for satellite overpasses. """ def get_instrument_points(self, overpass, utctime, scans_nb, scanpoints, scan_step=1): """Get the boundary points for a given overpass. """ instrument = overpass.instrument # logger.debug("Instrument: %s", str(instrument)) # cheating at the moment. # scan_angle = 55.37 if instrument == "modis": scan_angle = 55.0 instrument = "avhrr" elif instrument == "viirs": scan_angle = 55.84 instrument = "viirs" elif instrument == "iasi": scan_angle = 48.3 instrument = "avhrr" elif overpass.satellite == "noaa 16": scan_angle = 55.25 instrument = "avhrr" elif instrument.startswith("mersi"): scan_angle = 55.4 instrument = "avhrr" else: scan_angle = 55.25 instrument_fun = getattr(geoloc_instrument_definitions, INSTRUMENT.get(instrument, instrument)) if instrument.startswith("avhrr"): sgeom = instrument_fun(scans_nb, scanpoints, scan_angle=scan_angle, frequency=100) elif instrument in ["ascat", ]: sgeom = instrument_fun(scans_nb, scanpoints) elif instrument in ["amsua", 'mhs']: sgeom = instrument_fun(scans_nb, scanpoints) elif instrument in ["mwhs2", ]: sgeom = instrument_fun(scans_nb, scanpoints) elif instrument in ["olci", ]: sgeom = instrument_fun(scans_nb, scanpoints) elif instrument == 'viirs': sgeom = instrument_fun(scans_nb, scanpoints, scan_step=scan_step) elif instrument in ['mhs', 'atms', 'mwhs-2']: sgeom = instrument_fun(scans_nb, scanpoints) else: logger.warning("Instrument not tested: %s", instrument) sgeom = instrument_fun(scans_nb) times = sgeom.times(utctime) pixel_pos = geoloc.compute_pixels((self.orb.tle._line1, self.orb.tle._line2), sgeom, times) lons, lats, alts = geoloc.get_lonlatalt(pixel_pos, 
times) del alts return (lons.reshape(-1, len(scanpoints)), lats.reshape(-1, len(scanpoints))) def __init__(self, overpass, scan_step=50, frequency=200): # compute area covered by pass Boundary.__init__(self) self.overpass = overpass self.orb = overpass.orb # compute sides scanlength_seconds = ((overpass.falltime - overpass.risetime).seconds + (overpass.falltime - overpass.risetime).microseconds / 1000000.0) logger.debug("Instrument = %s", self.overpass.instrument) if self.overpass.instrument == 'viirs': sec_scan_duration = 1.779166667 along_scan_reduce_factor = 1 elif self.overpass.instrument.startswith("avhrr"): sec_scan_duration = 1. / 6. along_scan_reduce_factor = 0.1 elif self.overpass.instrument == 'ascat': sec_scan_duration = 3.74747474747 along_scan_reduce_factor = 1 # Overwrite the scan step scan_step = 1 elif self.overpass.instrument == 'amsua': sec_scan_duration = 8. along_scan_reduce_factor = 1 # Overwrite the scan step scan_step = 1 elif self.overpass.instrument == 'mhs': sec_scan_duration = 8./3. along_scan_reduce_factor = 1 # Overwrite the scan step scan_step = 1 elif self.overpass.instrument == 'mwhs2': sec_scan_duration = 8./3. along_scan_reduce_factor = 1 # Overwrite the scan step scan_step = 1 elif self.overpass.instrument == 'olci': # 3 minutes of data is 4091 300meter lines: sec_scan_duration = 0.04399902224395014 along_scan_reduce_factor = 1 # Overwrite the scan step scan_step = 100 elif self.overpass.instrument == 'atms': sec_scan_duration = 8/3. along_scan_reduce_factor = 1 # Overwrite the scan step scan_step = 1 else: # Assume AVHRR! logmsg = ("Instrument scan duration not known. Setting it to AVHRR. Instrument: ") logger.info(logmsg + "%s", str(self.overpass.instrument)) sec_scan_duration = 1. / 6. 
along_scan_reduce_factor = 0.1 # From pass length in seconds and the seconds for one scan derive the number of scans in the swath: scans_nb = scanlength_seconds / sec_scan_duration * along_scan_reduce_factor # Devide by the scan step to a reduced number of scans: scans_nb = np.floor(scans_nb / scan_step) scans_nb = int(max(scans_nb, 1)) sides_lons, sides_lats = self.get_instrument_points(self.overpass, overpass.risetime, scans_nb, np.array([0, self.overpass.number_of_fovs - 1]), scan_step=scan_step) side_shape = sides_lons[::-1, 0].shape[0] nmod = 1 if side_shape != scans_nb: nmod = side_shape // scans_nb logger.debug('Number of scan lines (%d) does not match number of scans (%d)', side_shape, scans_nb) logger.info('Take every %d th element on the sides...', nmod) self.left_lons = sides_lons[::-1, 0][::nmod] self.left_lats = sides_lats[::-1, 0][::nmod] self.right_lons = sides_lons[:, 1][::nmod] self.right_lats = sides_lats[:, 1][::nmod] # compute bottom maxval = self.overpass.number_of_fovs rest = maxval % frequency mid_range = np.arange(rest / 2, maxval, frequency) if mid_range[0] == 0: start_idx = 1 else: start_idx = 0 reduced = np.hstack([0, mid_range[start_idx::], maxval - 1]).astype('int') lons, lats = self.get_instrument_points(self.overpass, overpass.falltime, 1, reduced) self.bottom_lons = lons[0][::-1] self.bottom_lats = lats[0][::-1] # compute top lons, lats = self.get_instrument_points(self.overpass, overpass.risetime, 1, reduced) self.top_lons = lons[0] self.top_lats = lats[0] return def decimate(self, ratio): l = len(self.top_lons) start = (l % ratio) / 2 points = np.concatenate(([0], np.arange(start, l, ratio), [l - 1])) self.top_lons = self.top_lons[points] self.top_lats = self.top_lats[points] self.bottom_lons = self.bottom_lons[points] self.bottom_lats = self.bottom_lats[points] l = len(self.right_lons) start = (l % ratio) / 2 points = np.concatenate(([0], np.arange(start, l, ratio), [l - 1])) self.right_lons = self.right_lons[points] 
self.right_lats = self.right_lats[points] self.left_lons = self.left_lons[points] self.left_lats = self.left_lats[points] return def contour(self): lons = np.concatenate((self.top_lons, self.right_lons[1:-1], self.bottom_lons, self.left_lons[1:-1])) lats = np.concatenate((self.top_lats, self.right_lats[1:-1], self.bottom_lats, self.left_lats[1:-1])) return lons, lats pytroll-schedule-0.6.0/trollsched/combine.py000066400000000000000000000423641415440646100211630ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright (c) 2016, 2018 Martin Raspaud # # Author(s): # # Alexander Maul # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see . """Combine several graphs. """ import logging from datetime import datetime, timedelta from trollsched.graph import Graph logger = logging.getLogger("trollsched") def add_graphs(graphs, passes, delay=timedelta(seconds=0)): """Add all graphs to one combined graph. """ statlst = graphs.keys() def count_neq_passes(pl): """Counts how many satellite passes in a list are really distinct (satellite/epoch).""" if len(pl): r = [] s = 1 for q in pl[1:]: if pl[0] != q: r.append(q) s += count_neq_passes(r) return s else: return 0 for s, g in graphs.items(): logger.debug("station: %s, order: %d", s, g.order) # Graphs and allpasses are hashmaps of sets, or similar, but we need # lists of lists, forthat they are copied. 
grl = [] pl = [] for s in statlst: grl.append(graphs[s]) pl.append(sorted(passes[s], key=lambda x: x.risetime)) # Rough estimate for the size of the combined passes' graph. n_vertices = 1 for g in grl: n_vertices += g.order n_vertices *= len(statlst) * 2 newgraph = Graph(n_vertices=n_vertices) logger.debug("newgraph order: %d", newgraph.order) # This value signals the end, when no more passes from any antenna are available. stopper = tuple((None, None) for s in range(len(statlst))) # The new passes list, it'll be filled with tuples, each with of one pass per antenna. # It's initialized with the first passes. # # TODO: ideally something like next line, but this doesn't work faultless # if one or more stations have multiple "first passes": # newpasses = [tuple((pl[s][p - 1], None) for s in range(len(statlst)) for p in grl[s].neighbours(0))] # # TODO: not "just the first vertix" with the line: # parlist = [newpasses[0]] # newpasses = [tuple((pl[s][grl[s].neighbours(0)[0] - 1], None) for s in range(len(statlst)))] parlist = [newpasses[0]] while len(parlist): newparlist = [] for parnode in parlist: if parnode == stopper: # All antennas reached the end of passes list in this path of # possibilities. # stopper == ((None,None) * stations) # # If this happens for all elements of parlist, newparlist will # stay empty and (at the bottom of this loop) replace parlist, # which as an empty list will cause the surrounding while-loop # to end. continue collected_newnodes = collect_nodes(0, parnode, grl, newgraph, newpasses, pl, delay) for newnode_list in collected_newnodes: newnode = tuple(newnode_list) if newnode not in newpasses: newpasses.append(newnode) if newnode not in newparlist: newparlist.append(newnode) # Collecting the weights from each stations weight-matrix ... 
# (could be more compact if it weren't for the None-values) wl = [] for s, p, n in zip(range(len(statlst)), parnode, newnode): try: if n[0] is None: wl.append(0) else: wl.append(n[1] or grl[s].weight(pl[s].index(p[0]) + 1, pl[s].index(n[0]) + 1)) except Exception: logger.error( "Collecting weights: stat %d - parnode %s %s - newnode %s %s", s, parnode, p, newnode, n, exc_info=1) raise # Apply vertix-count to the sum of collected weights. # vertix-count: number of vertices with reference to same # satellite pass, it can result to 0, 1, 2. w = sum(wl) / 2 ** ((2 * len(parnode)) - count_neq_passes(parnode) - count_neq_passes(newnode)) # TODO: if the starting point isn't "just the first vertix", # the comparison must be changed if parnode == newpasses[0]: # "virtual" weight for the starting point. newgraph.add_arc(0, newpasses.index(parnode) + 1, w) newgraph.add_arc(newpasses.index(parnode) + 1, newpasses.index(newnode) + 1, w) parlist = newparlist logger.debug("newpasses length: %d", len(newpasses)) return statlst, newgraph, newpasses def collect_nodes(statnr, parnode, graph_set, newgraph, newpasses, passes_list, delay=timedelta(seconds=0)): """Collect all nodes reachable from the nodes in parnode, creating all combinations. RETURN: [[a1, b1], [a1, b2], ..., [a2, b1], ...] """ # All collected nodes are virtually occuring at the same time, so some nodes # might be pulled up in the timeline to create a set "overlapping" passes. # If there are no more passes available for one station, None is set. bufflist = [] p = parnode[statnr] g = graph_set[statnr] def overlap_any(this, test_list): """Tests if this overlapps any of the new-nodes in test_list. The new-nodes are in form (vertix, simulated-weight), only nodes without simulated weight are considered in the test. RETURN: -1 | 0 | +1 , if this lies before, overlapps any, or lies after the nodes in test_list. 
""" if len(test_list) == 0: return 0 minrise = datetime.utcnow() + timedelta(days=300) maxfall = datetime.utcnow() - timedelta(days=300) for p in test_list: if p[0] is None or p[1] is not None: continue if p[0].risetime < minrise: minrise = p[0].risetime if p[0].falltime > maxfall: maxfall = p[0].falltime if minrise > maxfall: return 0 elif this.falltime < minrise: return -1 elif this.risetime > maxfall: return +1 else: return 0 if p == (None, None): # There won't be any collectable nodes. # This None will act as a filler in the combined-vertices-tuples, # to get the access-by-index right. gn = [None] elif p[1] is not None: # A simulated parent node is set as neighbours' list. # It'll be processed as if it's the node which occurs in this # time-slot -- which it propably does, otherwise it's subjected # to simulation (again!). gn = [passes_list[statnr].index(p[0]) + 1] else: # Special cases aside, this creates a list of neighbours to the # current passes node. try: gn = g.neighbours(passes_list[statnr].index(p[0]) + 1) except Exception: print("len(passes_list)", len(passes_list), " len(graph_set)", len(graph_set), " statnr", statnr, " p", p) print("passes_list", passes_list) raise if gn[0] > len(passes_list[statnr]): # But if there weren't any neighbours, set an empty list. gn = [None] if statnr + 1 == len(parnode): # It's the 'rightmost' of the list parnode, # and the deepest point of the recursion. if None in gn: # That's "no further connection". # It get's a special treatment, because there is no None in the # passes-list we could access by index. bufflist = [[(None, None)]] else: # Prepare to return just the list of neighbouring vertices. for m in zip((passes_list[statnr][n - 1] for n in gn), (None for _ in gn)): bufflist.append([m]) else: # Since it's not the last element of the list parnode, we recurse and # then permutade all vertix-lists together. 
col = collect_nodes(statnr + 1, parnode, graph_set, newgraph, newpasses, passes_list) # Creating the permutation of all neighbours with the list returned # by the recursion. # A simulated parent node is seen as a regular list of neighbours. for n in gn: for cx in col: try: if n is None: # The end-of-neighbours dummy. cc = cx[:] cc.insert(0, (None, None)) bufflist.append(cc) else: # Are two passes are overlapping? overlap = overlap_any(passes_list[statnr][n - 1], cx) if overlap == 0: # Two passes overlapping, no special handling required. cc = cx[:] cc.insert(0, ( (passes_list[statnr][n - 1], None) )) bufflist.append(cc) elif overlap > 0: # If the current parent node's pass is not overlapping # but AFTER the pass from the recursion-return-list # the current parent node gets "simulated". cc = cx[:] cc.insert(0, ( passes_list[statnr][n - 1], g.weight(passes_list[statnr].index(p[0]) + 1, n) )) bufflist.append(cc) elif overlap < 0: # If the current parent node's pass is not overlapping # but BEFORE the pass from the recursion-return-list # the recursion-list-node gets "simulated". cc = [ (c[0], graph_set[s].weight( passes_list[s].index(parnode[s][0]) + 1, passes_list[s].index(c[0]) + 1 ) ) if c != (None, None) else (None, None) for s, c in zip(range(statnr + 1, len(parnode)), cx) ] cc.insert(0, (passes_list[statnr][n - 1], None)) bufflist.append(cc) else: print("uh-oh, something curious happened ...") except Exception: print("\nCATCH\ngn:", gn, "-> n", n, " col:", col, "-> cx", cx, "statnr", statnr, "statnr+i", statnr + 1) print("len(passes_list -n -cx)", len(passes_list[statnr]), len(passes_list[statnr + 1])) for s in range(statnr, len(passes_list)): print("passes_list[", s, "] =>", passes_list[s]) raise return bufflist def get_combined_sched(allgraphs, allpasses, delay_sec=60): delay = timedelta(seconds=delay_sec) statlst, newgraph, newpasses = add_graphs(allgraphs, allpasses, delay) # >>> DEV: test if the graphs could be "folded" to use use less RAM. 
# for s, g in allgraphs.items(): # print "test folding", s # test_folding(g) # print "test folding newgraph" # if test_folding(newgraph): # print_matrix(newgraph.adj_matrix, 25, 36) # <<< dist, path = newgraph.dag_longest_path(0, len(newpasses)) logger.debug("Distance: %d", dist) logger.debug("Path through newpasses: %s", path) del dist return statlst, [newpasses[idx - 1] for idx in path[1:-1]], (newgraph, newpasses) def print_matrix(m, ly=-1, lx=-1): """For DEBUG: Prints one of the graphs' backing matrix without flooding the screen. It'll print the first lx columns from the first ly rows, then the last lx columns from the last ly rows. """ for i, l in zip(range(ly), m[0:ly]): print(i, ":", l[:lx], "...") print("[..., ...]") for i, l in zip(range(len(m) - ly - 1, len(m) - 1), m[-ly:]): print(i, ": ...", l[-lx:]) def test_folding(g): """Test if the graphs could be "folded", or better "squished", to reduce size on cost of calculating the real x',y' from the x,y of the "unfolded" graph. """ r = False for u in range(g.order): for n in g.neighbours(u): if n < u: print(n, "<", u) r = True return r def main(): import logging import logging.handlers import os import pickle from trollsched.schedule import parse_datetime from trollsched.schedule import combined_stations, build_filename try: from trollsched.schedule import read_config import argparse logger = logging.getLogger("trollsched") parser = argparse.ArgumentParser() parser.add_argument("-c", "--config", default=None, help="configuration file to use") parser.add_argument("-s", "--start-time", type=parse_datetime, help="start time of the schedule to compute") parser.add_argument("-o", "--output-dir", default=None, help="where to put generated files") parser.add_argument("-x", "--xml", action="store_true", help="generate an xml request file (schedule)" ) parser.add_argument("-r", "--report", action="store_true", help="generate an xml report file (schedule)") parser.add_argument("--scisys", action="store_true", 
help="generate a SCISYS schedule file") parser.add_argument("-p", "--plot", action="store_true", help="generate plot images") parser.add_argument("-g", "--graph", action="store_true", help="save graph info") opts = parser.parse_args() if opts.config is None: parser.error("Configuration file required.") if opts.start_time: start_time = opts.start_time else: start_time = datetime.utcnow() # [coords, station, area, scores], forward, start, pattern station_list, forward, start, pattern = read_config(opts.config) pattern_args = { "output_dir": opts.output_dir, "date": start_time.strftime("%Y%m%d"), "time": start_time.strftime("%H%M%S") } dir_output = build_filename("dir_output", pattern, pattern_args) if not os.path.exists(dir_output): print(dir_output, "does not exist!") sys.exit(1) ph = open(os.path.join(dir_output, "opts.pkl"), "rb") opts = pickle.load(ph) ph.close() graph = {} allpasses = {} for coords, station, area, scores in station_list: pattern_args["station"] = station graph[station] = Graph() graph[station].load(build_filename("file_graph", pattern, pattern_args) + ".npz") # print "---",station,"---" # print_matrix(graph[station].adj_matrix, ly=5) # print_matrix(graph[station].weight_matrix, ly=5, lx=-1) # allpasses[station] = get_passes_from_xml_file(os.path.join(opts.report, # "acquisition-schedule-report." 
+ station + ".xml")) # print len(allpasses[station]),allpasses[station] # for v in graph[station].neighbours(1): # print v, " : ", allpasses[station][v].risetime, "->", graph[station].weight(1, v) ph = open( os.path.join(build_filename("dir_output", pattern, pattern_args), "allpasses.%s.pkl" % station), "rb") allpasses[station] = pickle.load(ph) ph.close() from trollsched.schedule import conflicting_passes totpas = [] for s, sp in allpasses.items(): print("len(sp)", s, len(sp)) totpas.extend(list(sp)) passes = sorted(totpas, key=lambda x: x.risetime) cpg = conflicting_passes(passes, timedelta(seconds=600)) print("ALLPASSES", len(allpasses)) # ,allpasses print("PASSES", len(passes)) # ,passes print("CONFLGRPS", len(cpg)) # ,cpg print("MAX", max([len(g) for g in cpg])) combined_stations(opts, pattern, station_list, graph, allpasses, start_time, start, forward) except Exception: logger.exception("Something wrong happened!") raise if __name__ == '__main__': main() pytroll-schedule-0.6.0/trollsched/compare.py000066400000000000000000000157241415440646100211750ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2014 Martin Raspaud # Author(s): # Martin Raspaud # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # You should have received a copy of the GNU General Public License # along with this program. If not, see . """Compare the request file and the confirmation file. 
""" import logging import logging.handlers import sys import os import glob logger = logging.getLogger(__name__) def xml_compare(x1_, x2_, reporter=None, skiptags=None): """Compare xml objects. """ if x1_.tag != x2_.tag: if reporter: reporter('Tags do not match: %s and %s' % (x1_.tag, x2_.tag)) return False for name, value in x1_.attrib.items(): if x2_.attrib.get(name) != value: if reporter: reporter('Attributes do not match: %s=%r, %s=%r' % (name, value, name, x2_.attrib.get(name))) return False for name in x2_.attrib.keys(): if name not in x1_.attrib: if reporter: reporter('x2_ has an attribute x1_ is missing: %s' % name) return False if not text_compare(x1_.text, x2_.text): if reporter: reporter('text: %r != %r' % (x1_.text, x2_.text)) return False if not text_compare(x1_.tail, x2_.tail): if reporter: reporter('tail: %r != %r' % (x1_.tail, x2_.tail)) return False cl1 = x1_.getchildren() cl2 = x2_.getchildren() if len(cl1) != len(cl2): if reporter: reporter('not the same number of passes, %i != %i' % (len(cl1), len(cl2))) return False i = 0 for c1, c2 in zip(cl1, cl2): i += 1 if skiptags and (c1.tag in skiptags): continue if not xml_compare(c1, c2, reporter=reporter): if reporter: reporter('element %i do not match: %s' % (i, c1.tag)) return False return True def text_compare(t1_, t2_): """Compare text fields. """ if not t1_ and not t2_: return True if t1_ == '*' or t2_ == '*': return True return (t1_ or '').strip() == (t2_ or '').strip() def compare(file1, file2): """Compare two xml files, request and confirmation. """ import xml.etree.ElementTree as ET xml1 = ET.parse(file1).getroot() xml2 = ET.parse(file2).getroot() if xml_compare(xml1, xml2, logger.error, ["confirmed-by", "confirmed-on", "properties"]): logger.info("All passes confirmed.") return True else: return False # import fnmatch # import pyinotify # class EventHandler(pyinotify.ProcessEvent): # """Manage events. 
# """ # def process_IN_CLOSE_WRITE(self, event): # if not fnmatch.fnmatch(event.pathname, "*-acquisition-schedule-confirmation-???.xml"): # return # logger.info("Processing: %s", event.pathname) # reqname = event.pathname[:-20] + "request" + event.pathname[-8:] # logger.info("Validating against: %s", reqname) # compare(reqname, event.pathname) # sys.exit(0) # def process_IN_MOVED_TO(self, event): # self.process_IN_CLOSE_WRITE(event) def run(): import argparse parser = argparse.ArgumentParser() parser.add_argument("-f", "--file", help="req/conf files to compare", nargs=2) parser.add_argument("-m", "--mail", nargs="*", help="mail address(es) to send error messages to.", default=None) parser.add_argument("-v", "--verbose", help="activate debug messages", action="store_true") parser.add_argument("-l", "--log", help="file to log to") # parser.add_argument("-w", "--watch", # help="directory to watch for new confirmation files") parser.add_argument("-r", "--most-recent", help="check the most recent request against the" + " corresponding confirmation, from the given directory") parser.add_argument("-c", "--confirmation", help="directory for the confirmation files") opts = parser.parse_args() if opts.log: handler = logging.handlers.TimedRotatingFileHandler(opts.log, "midnight", backupCount=7) else: handler = logging.StreamHandler() handler.setFormatter(logging.Formatter("[%(levelname)s: %(asctime)s :" " %(name)s] %(message)s", '%Y-%m-%d %H:%M:%S')) if opts.verbose: loglevel = logging.DEBUG else: loglevel = logging.INFO handler.setLevel(loglevel) logging.getLogger('').setLevel(loglevel) logging.getLogger('').addHandler(handler) if opts.mail: mhandler = logging.handlers.SMTPHandler("localhost", "satsateknik@smhi.se", opts.mail, "Scheduler confirmation") mhandler.setLevel(logging.WARNING) logging.getLogger('').addHandler(mhandler) logger = logging.getLogger("compare") logger.debug("DEBUG on") if opts.file: compare(opts.file[0], opts.file[1]) # if opts.watch: # wm = 
pyinotify.WatchManager() # Watch Manager # mask = pyinotify.IN_CLOSE_WRITE | pyinotify.IN_MOVED_TO # handler = EventHandler() # notifier = pyinotify.Notifier(wm, handler) # wdd = wm.add_watch(opts.watch, mask, rec=False) # notifier.loop() if opts.most_recent: logger.debug("looking for most recent file in " + os.path.join(opts.most_recent, "*request*.xml")) filelist = glob.glob(os.path.join(opts.most_recent, "*request*.xml")) newest = max(filelist, key=lambda x: os.stat(x).st_mtime) logger.debug("checking " + newest) reqdir, newfile = os.path.split(newest) confdir = opts.confirmation or reqdir confname = os.path.join(confdir, newfile[:-15] + "confirmation" + newfile[-8:]) logger.debug("against " + confname) try: compare(newest, confname) except IOError: logger.exception("Something went wrong!") if __name__ == '__main__': run() pytroll-schedule-0.6.0/trollsched/drawing.py000066400000000000000000000166471415440646100212070ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2018 - 2020 Pytroll Community # Author(s): # Adam.Dybbroe # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # You should have received a copy of the GNU General Public License # along with this program. If not, see . 
"""Drawing satellite overpass outlines on maps """ import os import logging import logging.handlers import numpy as np import matplotlib as mpl MPL_BACKEND = mpl.get_backend() logger = logging.getLogger(__name__) try: import cartopy.crs as ccrs import cartopy.feature as cfeature BASEMAP_NOT_CARTOPY = False except ImportError: logger.warning("Failed loading Cartopy, will try Basemap instead") BASEMAP_NOT_CARTOPY = True if not BASEMAP_NOT_CARTOPY: import cartopy cartopy.config['pre_existing_data_dir'] = os.environ.get( "CARTOPY_PRE_EXISTING_DATA_DIR", cartopy.config['pre_existing_data_dir']) class MapperBasemap(object): """A class to generate nice plots with basemap. """ def __init__(self, **proj_info): from mpl_toolkits.basemap import Basemap if not proj_info: proj_info = { 'projection': 'nsper', 'lat_0': 58, 'lon_0': 16, 'resolution': 'l', 'area_thresh': 1000. } self.map = Basemap(**proj_info) self.map.drawcoastlines() self.map.drawcountries() self.map.drawmapboundary(fill_color='white') self.map.drawmeridians(np.arange(0, 360, 30)) self.map.drawparallels(np.arange(-90, 90, 30)) def __enter__(self): return self.map def __exit__(self, etype, value, tb): pass class MapperCartopy(object): """A class to generate nice plots with Cartopy. 
""" def __init__(self, **proj_info): mpl.use(MPL_BACKEND) import matplotlib.pyplot as plt if not proj_info: proj_info = { 'central_latitude': 58, 'central_longitude': 16, 'satellite_height': 35785831, 'false_easting': 0, 'false_northing': 0, 'globe': None } fig = plt.figure(figsize=(8, 6)) self._ax = fig.add_subplot( 1, 1, 1, projection=ccrs.NearsidePerspective(**proj_info)) self._ax.add_feature(cfeature.OCEAN, zorder=0) self._ax.add_feature(cfeature.LAND, zorder=0, edgecolor='black') self._ax.add_feature(cfeature.BORDERS, zorder=0) self._ax.set_global() self._ax.gridlines() def plot(self, *args, **kwargs): mpl.use(MPL_BACKEND) import matplotlib.pyplot as plt kwargs['transform'] = ccrs.Geodetic() return plt.plot(*args, **kwargs) def nightshade(self, utctime, **kwargs): from trollsched.helper_functions import fill_dark_side color = kwargs.get('color', 'black') alpha = kwargs.get('alpha', 0.4) fill_dark_side(self._ax, time=utctime, color=color, alpha=alpha) def __call__(self, *args): return args def __enter__(self): return self def __exit__(self, etype, value, tb): pass if BASEMAP_NOT_CARTOPY: Mapper = MapperBasemap else: Mapper = MapperCartopy def save_fig(pass_obj, poly=None, directory="/tmp/plots", overwrite=False, labels=None, extension=".png", outline='-r', plot_parameters=None, plot_title=None, poly_color=None): """Save the pass as a figure. Filename is automatically generated. 
""" poly = poly or [] poly_color = poly_color or [] if not isinstance(poly, (list, tuple)): poly = [poly] if not isinstance(poly_color, (list, tuple)): poly_color = [poly_color] mpl.use('Agg') import matplotlib.pyplot as plt plt.clf() logger.debug("Save fig " + str(pass_obj)) rise = pass_obj.risetime.strftime("%Y%m%d%H%M%S") fall = pass_obj.falltime.strftime("%Y%m%d%H%M%S") if not os.path.exists(directory): logger.debug("Create plot dir " + directory) os.makedirs(directory) filename = '{rise}_{satname}_{instrument}_{fall}{extension}'.format(rise=rise, satname=pass_obj.satellite.name.replace( " ", "_"), instrument=pass_obj.instrument.replace( "/", "-"), fall=fall, extension=extension) filepath = os.path.join(directory, filename) pass_obj.fig = filepath if not overwrite and os.path.exists(filepath): return filepath logger.debug("Filename = <%s>", filepath) plot_parameters = plot_parameters or {} with Mapper(**plot_parameters) as mapper: mapper.nightshade(pass_obj.uptime, alpha=0.2) for i, polygon in enumerate(poly): try: col = poly_color[i] except IndexError: col = '-b' draw(polygon, mapper, col) logger.debug("Draw: outline = <%s>", outline) draw(pass_obj.boundary.contour_poly, mapper, outline) logger.debug("Title = %s", str(pass_obj)) if not plot_title: plt.title(str(pass_obj)) else: plt.title(plot_title) for label in labels or []: plt.figtext(*label[0], **label[1]) logger.debug("Save plot...") plt.savefig(filepath) logger.debug("Return...") return filepath def show(pass_obj, poly=None, labels=None, other_poly=None, proj=None, outline='-r'): """Show the current pass on screen (matplotlib, basemap). 
""" mpl.use(MPL_BACKEND) import matplotlib.pyplot as plt proj = proj or {} with Mapper(**proj) as mapper: mapper.nightshade(pass_obj.uptime, alpha=0.2) draw(pass_obj.boundary.contour_poly, mapper, outline) if poly is not None: draw(poly, mapper, "-b") if other_poly is not None: draw(other_poly, mapper, "-g") plt.title(str(pass_obj)) for label in (labels or []): plt.figtext(*label[0], **label[1]) plt.show() def draw(poly, mapper, options, **more_options): lons = np.rad2deg(poly.lon.take(np.arange(len(poly.lon) + 1), mode="wrap")) lats = np.rad2deg(poly.lat.take(np.arange(len(poly.lat) + 1), mode="wrap")) rx, ry = mapper(lons, lats) mapper.plot(rx, ry, options, **more_options) def main(): from trollsched.satpass import get_next_passes from datetime import datetime passes = get_next_passes(["noaa 19", "suomi npp"], datetime.now(), 24, (16, 58, 0)) for p in passes: save_fig(p, directory="/tmp/plots/") if __name__ == '__main__': main() pytroll-schedule-0.6.0/trollsched/graph.py000066400000000000000000000114701415440646100206420ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2014 Martin Raspaud # Author(s): # Martin Raspaud # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # You should have received a copy of the GNU General Public License # along with this program. If not, see . """Graph manipulation. 
""" import numpy as np class Graph(object): def __init__(self, n_vertices=None, adj_matrix=None): if n_vertices is not None: self.order = n_vertices self.vertices = np.arange(self.order) self.adj_matrix = np.zeros((self.order, self.order), np.bool) self.weight_matrix = np.zeros((self.order, self.order), np.float) elif adj_matrix is not None: self.order = adj_matrix.shape[0] self.vertices = np.arange(self.order) self.adj_matrix = adj_matrix self.weight_matrix = np.zeros_like(adj_matrix) def weight(self, u, v): """weight of the *u*-*v* edge. """ return self.weight_matrix[u, v] def neighbours(self, v): return self.vertices[self.adj_matrix[v, :] != 0] def add_edge(self, v1, v2, weight=1): self.weight_matrix[v1, v2] = weight self.weight_matrix[v2, v1] = weight self.adj_matrix[v1, v2] = True self.adj_matrix[v2, v1] = True def add_arc(self, v1, v2, weight=1): self.adj_matrix[v1, v2] = True self.weight_matrix[v1, v2] = weight def bron_kerbosch(self, r, p, x): """Get the maximal cliques. """ if len(p) == 0 and len(x) == 0: yield r for v in p: for res in self.bron_kerbosch(r | set((v, )), p & set(self.neighbours(v)), x & set(self.neighbours(v))): yield res p = p - set((v, )) x = x | set((v, )) def dag_longest_path(self, v1, v2=None): """Give the longest path from *v1* to all other vertices or *v2* if specified. Assumes the vertices are sorted topologically and that the graph is directed and acyclic (DAG). """ self.weight_matrix = -self.weight_matrix dist, path = self.dag_shortest_path(v1, v2) self.weight_matrix = -self.weight_matrix return dist, path def dag_shortest_path(self, v1, v2=None): """Give the sortest path from *v1* to all other vertices or *v2* if specified. Assumes the vertices are sorted topologically and that the graph is directed and acyclic (DAG). *v1* and *v2* are the indices of the vertices in the vertice list. """ # Dijkstra for DAGs. 
dists = [np.inf] * self.order paths = [list() for _ in range(self.order)] dists[v1] = 0 for u in self.vertices: # could be interrupted when we reach v2 ? for v in self.neighbours(u): if (dists[v] > dists[u] + self.weight(u, v)): dists[v] = dists[u] + self.weight(u, v) paths[v] = u if v2 is None: return dists, paths else: end = v2 path = [end] while end != v1: path.append(paths[end]) end = paths[end] return dists[v2], path def save(self, filename): np.savez_compressed(filename, adj=self.adj_matrix, weights=self.weight_matrix) def load(self, filename): stuff = np.load(filename) self.adj_matrix = stuff["adj"] self.weight_matrix = stuff["weights"] self.order = self.adj_matrix.shape[0] self.vertices = np.arange(self.order) def export(self, filename="./sched.gv", labels=None): """dot sched.gv -Tpdf -otruc.pdf """ with open(filename, "w") as fd_: fd_.write("digraph schedule { \n size=\"80, 10\";\n center=\"1\";\n") for v1 in range(1, self.order - 1): for v2 in range(1, self.order - 1): if self.adj_matrix[v1, v2]: fd_.write('"' + str(labels[v1 - 1]) + '"' + " -> " + '"' + str(labels[v2 - 1]) + '"' + ' [ label = "' + str(self.weight_matrix[v1, v2]) + '" ];\n') fd_.write("}\n") pytroll-schedule-0.6.0/trollsched/helper_functions.py000066400000000000000000000063071415440646100231130ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2018 Adam.Dybbroe # Author(s): # Adam.Dybbroe # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. 
# You should have received a copy of the GNU General Public License
# along with this program. If not, see .

"""Helper functions for the pytroll-schedule methods.

E.g. nightshade method for Cartopy as available in Basemap
"""

import numpy as np
from datetime import datetime


def sun_pos(dt=None):
    """Estimate the sub-solar point (latitude, longitude) at time *dt*.

    This is a rough visualization-grade approximation, precise only to a
    few degrees: around the equinoxes (where the latitude changes sign) it
    can be off by several days.  For precise work use e.g. PyEphem.

    Adapted from:
    https://scitools.org.uk/cartopy/docs/latest/gallery/aurora_forecast.html

    Parameters
    ----------
    dt: datetime
        Defaults to datetime.utcnow()

    Returns
    -------
    lat, lng: tuple of floats
        Approximate coordinates of the point where the sun is in zenith at
        the time dt.
    """
    when = datetime.utcnow() if dt is None else dt

    tilt_deg = 23.4
    solstice_ref = datetime(2016, 6, 21, 22, 22)
    year_days = 365.2425
    day_secs = 24 * 60 * 60.0

    # Latitude oscillates with the annual cycle around the reference solstice.
    elapsed_days = (when - solstice_ref).total_seconds() / day_secs
    lat = tilt_deg * np.cos(2 * np.pi * elapsed_days / year_days)

    # Longitude follows the daily rotation: noon UTC puts the sun near 0 deg.
    midnight = datetime(when.year, when.month, when.day)
    since_midnight = (when - midnight).seconds
    lng = -(since_midnight / day_secs - 0.5) * 360

    return lat, lng


def fill_dark_side(ax, time=None, **kwargs):
    """Fill the night side of the planet (without refraction) on *ax*.

    Parameters
    ----------
    ax : Matplotlib axes
        The axes to plot on.
    time : datetime
        The time to calculate terminator for. Defaults to datetime.utcnow()
    **kwargs :
        Passed on to Matplotlib's ax.fill()
    """
    import cartopy.crs as ccrs

    sub_lat, sub_lng = sun_pos(time)

    # Put the rotated pole at the antisolar point so the fixed half-disc
    # below covers exactly the hemisphere facing away from the sun.
    if sub_lat > 0:
        pole_lat = -90 + sub_lat
        central_rot_lng = 180
    else:
        pole_lat = 90 + sub_lat
        central_rot_lng = 0

    night_crs = ccrs.RotatedPole(pole_latitude=pole_lat,
                                 pole_longitude=sub_lng,
                                 central_rotated_longitude=central_rot_lng)

    # A closed half-disc in the rotated frame: down one meridian, up the other.
    xs = np.concatenate((np.full(180, -90.0), np.full(180, 90.0)))
    ys = np.concatenate((np.arange(-90, 90.), np.arange(90, -90., -1)))

    ax.fill(xs, ys, transform=night_crs, **kwargs)
# pytroll-schedule-0.6.0/trollsched/satpass.py000066400000000000000000000715011415440646100212200ustar00rootroot00000000000000#!/usr/bin/env python
# -*- coding: utf-8 -*-

# Copyright (c) 2014 - 2019 PyTroll Community

# Author(s):

#   Martin Raspaud
#   Alexander Maul
#   Adam Dybbroe

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.

# You should have received a copy of the GNU General Public License
# along with this program. If not, see .
# """Satellite passes.
""" import ftplib import glob import logging import logging.handlers import operator import os import socket import sys from datetime import datetime, timedelta from functools import reduce as fctools_reduce from tempfile import mkstemp import numpy as np from urllib.parse import urlparse from pyorbital import orbital, tlefile from pyresample.boundary import AreaDefBoundary from trollsched import MIN_PASS, NOAA20_NAME, NUMBER_OF_FOVS from trollsched.boundary import SwathBoundary logger = logging.getLogger(__name__) VIIRS_PLATFORM_NAMES = ['SUOMI NPP', 'SNPP', 'NOAA-20', 'NOAA 20'] MERSI_PLATFORM_NAMES = ['FENGYUN 3C', 'FENGYUN-3C', 'FY-3C'] MERSI2_PLATFORM_NAMES = ['FENGYUN 3D', 'FENGYUN-3D', 'FY-3D', 'FENGYUN 3E', 'FENGYUN-3E', 'FY-3E'] class SimplePass(object): """A pass: satellite, risetime, falltime, (orbital) """ buffer = timedelta(minutes=2) def __init__(self, satellite, risetime, falltime): if not hasattr(satellite, 'name'): from trollsched.schedule import Satellite self.satellite = Satellite(satellite, 0, 0) else: self.satellite = satellite self.risetime = risetime self.falltime = falltime self.score = {} self.subsattrack = {"start": None, "end": None} self.rec = False self.fig = None def __hash__(self): return super.__hash__(self) def overlaps(self, other, delay=timedelta(seconds=0)): """Check if two passes overlap in time. """ return ((self.risetime < other.falltime + delay) and (self.falltime + delay > other.risetime)) def __lt__(self, other): return self.uptime < other.uptime def __gt__(self, other): return self.uptime > other.uptime def __cmp__(self, other): if self.uptime < other.uptime: return -1 if self.uptime > other.uptime: return 1 else: return 0 def __eq__(self, other): """Determine if two satellite passes are the same.""" # Two passes, maybe observed from two distinct stations, are compared by # a) satellite name and orbit number, # or if the later is not available # b) the time difference between rise- and fall-times. 
if other is not None and isinstance(self, Pass) and isinstance( other, Pass): return (self.satellite.name == other.satellite.name and self.orb.get_orbit_number(self.risetime) == other.orb.get_orbit_number(other.risetime)) tol = timedelta(seconds=1) return (other is not None and abs(self.risetime - other.risetime) < tol and abs(self.falltime - other.falltime) < tol and self.satellite == other.satellite) def __str__(self): return (self.satellite.name + " " + self.risetime.isoformat() + " " + self.falltime.isoformat()) def __repr__(self): return str(self) def duration(self): """Get the duration of an overpass. """ return self.falltime - self.risetime def seconds(self): """Get the duration of an overpass. """ duration = self.duration() return (duration.days * 24 * 60 * 60 + duration.seconds + duration.microseconds * 1e-6) class Pass(SimplePass): """A pass: satellite, risetime, falltime, (orbital) """ def __init__(self, satellite, risetime, falltime, **kwargs): SimplePass.__init__(self, satellite, risetime, falltime) logger.debug("kwargs: %s", str(kwargs)) orb = kwargs.get('orb', None) uptime = kwargs.get('uptime', None) instrument = kwargs.get('instrument', None) tle1 = kwargs.get('tle1', None) tle2 = kwargs.get('tle2', None) logger.debug("instrument: %s", str(instrument)) if isinstance(instrument, (list, set)): if 'avhrr' in instrument: logger.warning("Instrument is a sequence! Assume avhrr...") instrument = 'avhrr' elif 'viirs' in instrument: logger.warning("Instrument is a sequence! Assume viirs...") instrument = 'viirs' elif 'modis' in instrument: logger.warning("Instrument is a sequence! Assume modis...") instrument = 'modis' elif 'mersi' in instrument: logger.warning("Instrument is a sequence! Assume mersi...") instrument = 'mersi' elif 'mersi-2' in instrument: logger.warning("Instrument is a sequence! Assume mersi-2...") instrument = 'mersi-2' else: raise TypeError("Instrument is a sequence! 
Don't know which one to choose!") default = NUMBER_OF_FOVS.get(instrument, 2048) self.number_of_fovs = kwargs.get('number_of_fovs', default) # The frequency shouldn't actualy depend on the number of FOVS along a scanline should it!? # frequency = kwargs.get('frequency', int(self.number_of_fovs / 4)) frequency = kwargs.get('frequency', 300) self.station = None self.max_elev = None self.uptime = uptime or (risetime + (falltime - risetime) / 2) self.instrument = instrument self.frequency = frequency if orb: self.orb = orb else: try: self.orb = orbital.Orbital(satellite, line1=tle1, line2=tle2) except KeyError as err: logger.debug('Failed in PyOrbital: %s', str(err)) self.orb = orbital.Orbital( NOAA20_NAME.get(satellite, satellite), line1=tle1, line2=tle2) logger.info('Using satellite name %s instead', str(NOAA20_NAME.get(satellite, satellite))) self._boundary = None @property def boundary(self): if not self._boundary: self._boundary = SwathBoundary(self, frequency=self.frequency) return self._boundary @boundary.setter def boundary(self, value): self._boundary = SwathBoundary(self, frequency=self.frequency) def pass_direction(self): """Get the direction of the pass in (ascending, descending). """ start_lat = self.orb.get_lonlatalt(self.risetime)[1] end_lat = self.orb.get_lonlatalt(self.falltime)[1] if start_lat > end_lat: return "descending" else: return "ascending" def slsearch(self, sublat): """Find sublatitude. 
""" def nadirlat(minutes): return self.orb.get_lonlatalt(self.risetime + timedelta( minutes=np.float64(minutes)))[1] - sublat def get_root(fun, start, end): p = np.polyfit( [start, (start + end) / 2.0, end], [fun(start), fun((start + end) / 2), fun(end)], 2) for root in np.roots(p): if root <= end and root >= start: return root arr = np.array([nadirlat(m) for m in range(15)]) a = np.where(np.diff(np.sign(arr)))[0] for guess in a: sublat_mins = get_root(nadirlat, guess, guess + 1) return self.risetime + timedelta(minutes=sublat_mins) def area_coverage(self, area_of_interest): """Get the ratio of coverage (between 0 and 1) of the pass with the area of interest. """ try: area_boundary = area_of_interest.poly except AttributeError: area_boundary = AreaDefBoundary(area_of_interest, frequency=100) area_boundary = area_boundary.contour_poly inter = self.boundary.contour_poly.intersection(area_boundary) if inter is None: return 0 return inter.area() / area_boundary.area() def generate_metno_xml(self, coords, root): import xml.etree.ElementTree as ET asimuth_at_max_elevation, max_elevation = self.orb.get_observer_look(self.uptime, *coords) pass_direction = self.pass_direction().capitalize()[:1] # anl = self.orb.get_lonlatalt(self.orb.get_last_an_time(self.risetime))[0] % 360 asimuth_at_aos, aos_elevation = self.orb.get_observer_look(self.risetime, *coords) orbit = self.orb.get_orbit_number(self.risetime) # aos_epoch=int((self.risetime-datetime(1970,1,1)).total_seconds()) sat_lon, sat_lat, alt = self.orb.get_lonlatalt(self.risetime) ovpass = ET.SubElement(root, "pass") ovpass.set("satellite", self.satellite.name) ovpass.set("aos", self.risetime.strftime("%Y%m%d%H%M%S")) ovpass.set("los", self.falltime.strftime("%Y%m%d%H%M%S")) ovpass.set("orbit", "{:d}".format(orbit)) ovpass.set("max-elevation", "{:.3f}".format(max_elevation)) ovpass.set("asimuth-at-max-elevation", "{:.3f}".format(asimuth_at_max_elevation)) ovpass.set("asimuth-at-aos", "{:.3f}".format(asimuth_at_aos)) 
ovpass.set("pass-direction", pass_direction) ovpass.set("satellite-lon-at-aos", "{:.3f}".format(sat_lon)) ovpass.set("satellite-lat-at-aos", "{:.3f}".format(sat_lat)) ovpass.set("tle-epoch", self.orb.orbit_elements.epoch.astype(datetime).strftime("%Y%m%d%H%M%S.%f")) if self.fig: ovpass.set("figure", self.fig) return True def print_meos(self, coords, line_no): """ No. Date Satellite Orbit Max EL AOS Ovlp LOS Durtn Az(AOS/MAX) """ asimuth_at_max_elevation, max_elevation = self.orb.get_observer_look(self.uptime, *coords) pass_direction = self.pass_direction().capitalize()[:1] # anl = self.orb.get_lonlatalt(self.orb.get_last_an_time(self.risetime))[0] % 360 asimuth_at_aos, aos_elevation = self.orb.get_observer_look(self.risetime, *coords) orbit = self.orb.get_orbit_number(self.risetime) aos_epoch = int((self.risetime - datetime(1970, 1, 1)).total_seconds()) sat_lon, sat_lat, alt = self.orb.get_lonlatalt(self.risetime) dur_secs = (self.falltime - self.risetime).seconds dur_hours, dur_reminder = divmod(dur_secs, 3600) dur_minutes, dur_seconds = divmod(dur_reminder, 60) duration = "{:0>2}:{:0>2}".format(dur_minutes, dur_seconds) satellite_meos_translation = {"NOAA 19": "NOAA_19", "NOAA 18": "NOAA_18", "NOAA 15": "NOAA_15", "METOP-A": "M02", "METOP-B": "M01", "FENGYUN 3A": "FENGYUN-3A", "FENGYUN 3B": "FENGYUN-3B", "FENGYUN 3C": "FENGYUN-3C", "SUOMI NPP": "NPP"} import hashlib pass_key = hashlib.md5(("{:s}|{:d}|{:d}|{:.3f}|{:.3f}". 
format(satellite_meos_translation.get(self.satellite.name.upper(), self.satellite.name.upper()), int(orbit), aos_epoch, sat_lon, sat_lat)).encode('utf-8')).hexdigest() line_list = [" {line_no:>2}", "{date}", "{satellite:<10}", "{orbit:>5}", "{elevation:>6.3f} ", "{risetime}", "{overlap:<5s}", "{falltime}", "{duration}", "{asimuth_at_aos:>5.1f}", "{asimuth_at_max:>5.1f}", "-- Undefined(Scheduling not done {aos_epoch} )", "{passkey}", "{pass_direction}" ] line = " ".join(line_list).format( # line_no=line_no, line_no=1, date=self.risetime.strftime("%Y%m%d"), satellite=satellite_meos_translation.get(self.satellite.name.upper(), self.satellite.name.upper()), orbit=orbit, elevation=max_elevation, risetime=self.risetime.strftime("%H:%M:%S"), overlap="n/a", falltime=self.falltime.strftime("%H:%M:%S"), duration=duration, asimuth_at_aos=asimuth_at_aos, asimuth_at_max=asimuth_at_max_elevation, aos_epoch=aos_epoch, passkey=pass_key, pass_direction=pass_direction) return line def print_vcs(self, coords): """Should look like this:: # SCName RevNum Risetime Falltime Elev Dura ANL Rec Dir Man Ovl OvlSCName # OvlRev OvlRisetime OrigRisetime OrigFalltime OrigDuration # NOAA 19 24845 20131204 001450 20131204 003003 32.0 15.2 225.6 Y Des N N none # 0 19580101 000000 20131204 001450 20131204 003003 15.2 """ max_elevation = self.orb.get_observer_look(self.uptime, *coords)[1] anl = self.orb.get_lonlatalt(self.orb.get_last_an_time( self.risetime))[0] % 360 # anl = self.orb.get_observer_look(self.risetime, *coords)[0] if self.rec: rec = "Y" else: rec = "N" line_list = [ "{satellite:<16}", "{orbit:>6}", "{risetime}", "{falltime}", "{elevation:>4.1f}", "{duration:>4.1f}", "{anl:>5.1f}", "{rec:<3}", "{direction}", "N N none 0 19580101 000000", "{risetime}", "{falltime}", "{duration:>4.1f}", ] line = " ".join(line_list).format( satellite=self.satellite.name.upper(), orbit=self.orb.get_orbit_number(self.risetime), risetime=self.risetime.strftime("%Y%m%d %H%M%S"), 
falltime=self.falltime.strftime("%Y%m%d %H%M%S"), elevation=max_elevation, duration=(self.falltime - self.risetime).seconds / 60.0, anl=anl, rec=rec, direction=self.pass_direction().capitalize()[:3]) return line HOST = "ftp://is.sci.gsfc.nasa.gov/ancillary/ephemeris/schedule/%s/downlink/" def get_aqua_terra_dumps(start_time, end_time, satorb, sat, dump_url=None): """ Get the Terra and Aqua overpasses taking into account the fact that when there are global dumps there is no direct broadcast """ # Get the list of aqua/terra dump info: dump_info_list = get_aqua_terra_dumpdata_from_ftp(sat, dump_url) dumps = [] for elem in dump_info_list: if elem['los'] >= start_time and elem['aos'] <= end_time: uptime = elem['aos'] + (elem['los'] - elem['aos']) / 2 overpass = Pass(sat, elem['aos'], elem['los'], orb=satorb, uptime=uptime, instrument="modis") overpass.station = elem['station'] overpass.max_elev = elem['elev'] dumps.append(overpass) return dumps def get_aqua_terra_dumpdata_from_ftp(sat, dump_url): """ Get the information on the internet on the actual global dumps of Terra and Aqua """ logger.info("Fetch %s dump info from internet", str(sat.name)) if isinstance(dump_url, str): url = urlparse(dump_url % sat.name) else: url = urlparse(HOST % sat.name) logger.debug("Connect to ftp server") try: f = ftplib.FTP_TLS(url.netloc) except (socket.error, socket.gaierror) as e: logger.error('cannot reach to %s ' % HOST + str(e)) f = None if f is not None: try: f.login('anonymous', 'guest') logger.debug("Logged in") except ftplib.error_perm: logger.error('cannot login anonymously') f.quit() f = None if f is not None: data = [] try: f.prot_p() # explicitly call for protected transfer f.dir(url.path, data.append) except socket.error as e: logger.error("Can't get any data: " + str(e)) f.quit() f = None else: filenames = [line.split()[-1] for line in data] if f is None: logger.info("Can't access ftp server, using cached data") filenames = glob.glob("/tmp/*.rpt") filenames = [ x for x in 
filenames if x.startswith("wotis.") and x.endswith(".rpt") ] dates = [ datetime.strptime("".join(filename.split(".")[2:4]), "%Y%j%H%M%S") for filename in filenames ] filedates = dict(zip(dates, filenames)) dumps = [] for date in sorted(dates): lines = [] if not os.path.exists(os.path.join("/tmp", filedates[date])): try: f.prot_p() # explicitly call for protected transfer f.retrlines('RETR ' + os.path.join(url.path, filedates[date]), lines.append) except ftplib.error_perm: logger.info("Permission error (???) on ftp server, skipping.") continue with open(os.path.join("/tmp", filedates[date]), "w") as fd_: for line in lines: fd_.write(line + "\n") else: with open(os.path.join("/tmp", filedates[date]), "r") as fd_: for line in fd_: lines.append(line) # for line in lines[7::2]: # if line.strip() == '': # break # station, aos, elev, los = line.split()[:4] # aos = datetime.strptime(aos, "%Y:%j:%H:%M:%S") # los = datetime.strptime(los, "%Y:%j:%H:%M:%S") # if los >= start_time and aos <= end_time: # uptime = aos + (los - aos) / 2 # overpass = Pass(sat, aos, los, orb=satorb, uptime=uptime, instrument="modis") # overpass.station = station # overpass.max_elev = elev # dumps.append(overpass) for line in lines[7::2]: if line.strip() == '': break station, aos, elev, los = line.split()[:4] aos = datetime.strptime(aos, "%Y:%j:%H:%M:%S") los = datetime.strptime(los, "%Y:%j:%H:%M:%S") dumps.append({'station': station, 'aos': aos, 'los': los, 'elev': elev}) if f is not None: f.quit() return dumps def get_next_passes(satellites, utctime, forward, coords, tle_file=None, aqua_terra_dumps=None, min_pass=MIN_PASS, local_horizon=0): """Get the next passes for *satellites*, starting at *utctime*, for a duration of *forward* hours, with observer at *coords* ie lon (°E), lat (°N), altitude (km). Uses *tle_file* if provided, downloads from celestrack otherwise. Metop-A, Terra and Aqua need special treatment due to downlink restrictions. 
""" passes = {} if tle_file is None and 'TLES' not in os.environ: fp_, tle_file = mkstemp(prefix="tle", dir="/tmp") os.close(fp_) logger.info("Fetch tle info from internet") tlefile.fetch(tle_file) if not os.path.exists(tle_file) and 'TLES' not in os.environ: logger.info("Fetch tle info from internet") tlefile.fetch(tle_file) for sat in satellites: if not hasattr(sat, 'name'): from trollsched.schedule import Satellite sat = Satellite(sat, 0, 0) satorb = orbital.Orbital(sat.name, tle_file=tle_file) passlist = satorb.get_next_passes(utctime, forward, horizon=local_horizon, *coords ) if sat.name.lower() == "metop-a": # Take care of metop-a special case passes["metop-a"] = get_metopa_passes(sat, passlist, satorb) elif sat.name.lower() in ["aqua", "terra"] and aqua_terra_dumps: # Take care of aqua (dumps in svalbard and poker flat) # Get the Terra/Aqua passes and fill the passes dict: get_terra_aqua_passes(passes, utctime, forward, sat, passlist, satorb, aqua_terra_dumps) else: if sat.name.upper() in VIIRS_PLATFORM_NAMES: instrument = "viirs" elif sat.name.lower().startswith("metop") or sat.name.lower().startswith("noaa"): instrument = "avhrr" elif sat.name.lower() in ["aqua", "terra"]: # when aqua_terra_dumps=False instrument = "modis" elif sat.name.upper() in MERSI_PLATFORM_NAMES: instrument = "mersi" elif sat.name.upper() in MERSI2_PLATFORM_NAMES: instrument = "mersi-2" else: instrument = "unknown" passes[sat.name] = [ Pass(sat, rtime, ftime, orb=satorb, uptime=uptime, instrument=instrument) for rtime, ftime, uptime in passlist if ftime - rtime > timedelta(minutes=MIN_PASS) ] return set(fctools_reduce(operator.concat, list(passes.values()))) def get_metopa_passes(sat, passlist, satorb): """Get the Metop-A passes, taking care that Metop-A doesn't transmit to ground everywhere """ metop_passes = [ Pass(sat, rtime, ftime, orb=satorb, uptime=uptime, instrument='avhrr') for rtime, ftime, uptime in passlist if rtime < ftime ] passes = [] for overpass in metop_passes: if 
overpass.pass_direction() == "descending": new_rise = overpass.slsearch(60) if new_rise is not None and new_rise < overpass.falltime: overpass.risetime = new_rise # overpass has a boundary property, and it is not really needed here anyways! # overpass.boundary = SwathBoundary(overpass) if overpass.seconds() > MIN_PASS * 60: passes.append(overpass) return passes def get_terra_aqua_passes(passes, utctime, forward, sat, passlist, satorb, aqua_terra_dumps): """Get the Terra/Aqua passes, taking care that Terra and Aqua do not have direct broadcast when there are global dumps passes: The dictionary of satellite passes which is being built utctime: The start time (datetime object) forward: The number of hours ahead for which we will get the coming passes sat: The Satellite platform considered passlist: List of Pass objects satorb: Orbital instance for the actual satellite and tles considered aqua_terra_dumps: True or False or the actual URL to get info on Terra/Aqua dumps. If True, the default URL will be used. If False or None, no dump info will be considered. 
""" instrument = 'modis' wpcoords = (-75.457222, 37.938611, 0) passlist_wp = satorb.get_next_passes( utctime - timedelta(minutes=30), forward + 1, *wpcoords) wp_passes = [ Pass(sat, rtime, ftime, orb=satorb, uptime=uptime, instrument=instrument) for rtime, ftime, uptime in passlist_wp if rtime < ftime ] svcoords = (15.399, 78.228, 0) passlist_sv = satorb.get_next_passes( utctime - timedelta(minutes=30), forward + 1, *svcoords) sv_passes = [ Pass(sat, rtime, ftime, orb=satorb, uptime=uptime, instrument=instrument) for rtime, ftime, uptime in passlist_sv if rtime < ftime ] pfcoords = (-147.43, 65.12, 0.51) passlist_pf = satorb.get_next_passes( utctime - timedelta(minutes=30), forward + 1, *pfcoords) pf_passes = [ Pass(sat, rtime, ftime, orb=satorb, uptime=uptime, instrument=instrument) for rtime, ftime, uptime in passlist_pf if rtime < ftime ] aqua_passes = [ Pass(sat, rtime, ftime, orb=satorb, uptime=uptime, instrument=instrument) for rtime, ftime, uptime in passlist if rtime < ftime ] dumps = get_aqua_terra_dumps(utctime - timedelta(minutes=30), utctime + timedelta(hours=forward + 0.5), satorb, sat, aqua_terra_dumps) # remove the known dumps for dump in dumps: # print "*", dump.station, dump, dump.max_elev logger.debug("dump from ftp: " + str((dump.station, dump, dump.max_elev))) for i, sv_pass in enumerate(sv_passes): if sv_pass.overlaps(dump, timedelta(minutes=40)): sv_elevation = sv_pass.orb.get_observer_look( sv_pass.uptime, *svcoords)[1] logger.debug("Computed " + str(("SG", sv_pass, sv_elevation))) del sv_passes[i] for i, pf_pass in enumerate(pf_passes): if pf_pass.overlaps(dump, timedelta(minutes=40)): pf_elevation = pf_pass.orb.get_observer_look( pf_pass.uptime, *pfcoords)[1] logger.debug("Computed " + str(("PF", pf_pass, pf_elevation))) del pf_passes[i] for i, wp_pass in enumerate(wp_passes): if wp_pass.overlaps(dump, timedelta(minutes=40)): wp_elevation = wp_pass.orb.get_observer_look( wp_pass.uptime, *wpcoords)[1] logger.debug("Computed " + str(("WP", 
wp_pass, wp_elevation))) del wp_passes[i] # sort out dump passes first # between sv an pf, we take the one with the highest elevation if # pf < 20°, pf otherwise # I think wp is also used if sv is the only other alternative used_pf = [] for sv_pass in sv_passes: found_pass = False for pf_pass in pf_passes: if sv_pass.overlaps(pf_pass): found_pass = True used_pf.append(pf_pass) sv_elevation = sv_pass.orb.get_observer_look( sv_pass.uptime, *svcoords)[1] pf_elevation = pf_pass.orb.get_observer_look( pf_pass.uptime, *pfcoords)[1] if pf_elevation > 20: dumps.append(pf_pass) elif sv_elevation > pf_elevation: dumps.append(sv_pass) else: dumps.append(pf_pass) break if not found_pass: dumps.append(sv_pass) for pf_pass in pf_passes: if pf_pass not in used_pf: dumps.append(pf_pass) passes[sat.name] = [] for overpass in aqua_passes: add = True for dump_pass in dumps: if dump_pass.overlaps(overpass): if (dump_pass.uptime < overpass.uptime and dump_pass.falltime > overpass.risetime): logger.debug("adjusting " + str(overpass) + " to new risetime " + str(dump_pass.falltime)) overpass.risetime = dump_pass.falltime overpass.boundary = SwathBoundary(overpass) elif (dump_pass.uptime >= overpass.uptime and dump_pass.risetime < overpass.falltime): logger.debug("adjusting " + str(overpass) + " to new falltime " + str(dump_pass.risetime)) overpass.falltime = dump_pass.risetime overpass.boundary = SwathBoundary(overpass) if overpass.falltime <= overpass.risetime: add = False logger.debug("skipping " + str(overpass)) if add and overpass.seconds() > MIN_PASS * 60: passes[sat.name].append(overpass) return pytroll-schedule-0.6.0/trollsched/schedule.py000066400000000000000000001126661415440646100213460ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2013 - 2019 PyTroll # Author(s): # Martin Raspaud # Alexander Maul # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published 
by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # You should have received a copy of the GNU General Public License # along with this program. If not, see . """Scheduling """ import logging import logging.handlers import os from urllib.parse import urlparse from datetime import datetime, timedelta from pprint import pformat import numpy as np from pyorbital import astronomy try: from pyresample import parse_area_file except ImportError: # Older versions of pyresample: from pyresample.utils import parse_area_file from trollsched import utils from trollsched.spherical import get_twilight_poly from trollsched.graph import Graph from trollsched.satpass import get_next_passes, SimplePass from pyresample.boundary import AreaDefBoundary from trollsched.combine import get_combined_sched logger = logging.getLogger(__name__) # name/id for centre/org creating schedules CENTER_ID = "SMHI" class Station(object): """docstring for Station.""" def __init__(self, station_id, name, longitude, latitude, altitude, area, satellites, area_file=None, min_pass=None, local_horizon=0): super(Station, self).__init__() self.id = station_id self.name = name self.longitude = longitude self.latitude = latitude self.altitude = altitude self.area = area self.satellites = satellites if area_file is not None: try: self.area = parse_area_file(area_file, area)[0] except TypeError: pass self.min_pass = min_pass self.local_horizon = local_horizon @property def coords(self): return self.longitude, self.latitude, self.altitude def single_station(self, sched, start_time, tle_file): """Calculate passes, graph, and schedule for one station.""" logger.debug("station: %s coords: %s area: %s scores: %s", self.id, 
self.coords, self.area.area_id, self.satellites) opts = sched.opts pattern = sched.patterns pattern_args = { "station": self.id, "output_dir": opts.output_dir, "date": start_time.strftime("%Y%m%d"), "time": start_time.strftime("%H%M%S") } if opts.xml: pattern_args['mode'] = "request" elif opts.report: pattern_args['mode'] = "report" logger.info("Computing next satellite passes") allpasses = get_next_passes(self.satellites, start_time, sched.forward, self.coords, tle_file, aqua_terra_dumps=(sched.dump_url or True if opts.no_aqua_terra_dump else None), min_pass=self.min_pass, local_horizon=self.local_horizon ) logger.info("Computation of next overpasses done") logger.debug(str(sorted(allpasses, key=lambda x: x.risetime))) area_boundary = AreaDefBoundary(self.area, frequency=500) self.area.poly = area_boundary.contour_poly if opts.plot: logger.info("Saving plots to %s", build_filename( "dir_plots", pattern, pattern_args)) from threading import Thread image_saver = Thread( target=save_passes, args=(allpasses, self.area.poly, build_filename( "dir_plots", pattern, pattern_args), sched.plot_parameters, sched.plot_title ) ) image_saver.start() if opts.avoid is not None: avoid_list = get_passes_from_xml_file(opts.avoid) else: avoid_list = None logger.info("computing best schedule for area %s" % self.area.area_id) schedule, (graph, labels) = get_best_sched(allpasses, self.area, timedelta(seconds=opts.delay), avoid_list) logger.debug(pformat(schedule)) for opass in schedule: opass.rec = True logger.info("generating file") if opts.scisys: generate_sch_file(build_filename("file_sci", pattern, pattern_args), allpasses, self.coords) if opts.meos: generate_meos_file(build_filename("file_meos", pattern, pattern_args), allpasses, self.coords, start_time + timedelta(hours=sched.start), True) # Ie report mode if opts.plot: logger.info("Waiting for images to be saved...") image_saver.join() logger.info("Done!") if opts.metno_xml: generate_metno_xml_file(build_filename("file_metno_xml", 
pattern, pattern_args), allpasses, self.coords, start_time + timedelta(hours=sched.start), start_time + timedelta(hours=sched.forward), self.id, sched.center_id, True) if opts.xml or opts.report: url = urlparse(opts.output_url or opts.output_dir) if opts.xml or opts.report: """Allways create xml-file in request-mode""" pattern_args['mode'] = "request" xmlfile = generate_xml_file(allpasses, start_time + timedelta(hours=sched.start), start_time + timedelta(hours=sched.forward), build_filename( "file_xml", pattern, pattern_args), self.id, sched.center_id, False ) logger.info("Generated " + str(xmlfile)) send_file(url, xmlfile) if opts.report: """'If report-mode was set""" pattern_args['mode'] = "report" xmlfile = generate_xml_file(allpasses, start_time + timedelta(hours=sched.start), start_time + timedelta(hours=sched.forward), build_filename( "file_xml", pattern, pattern_args), self.id, sched.center_id, True ) logger.info("Generated " + str(xmlfile)) if opts.graph or opts.comb: graph.save(build_filename("file_graph", pattern, pattern_args)) graph.export( labels=[str(label) for label in labels], filename=build_filename("file_graph", pattern, pattern_args) + ".gv" ) if opts.comb: import pickle ph = open(os.path.join(build_filename("dir_output", pattern, pattern_args), "allpasses.%s.pkl" % self.id), "wb") pickle.dump(allpasses, ph) ph.close() return graph, allpasses class SatScore(object): """docstring for SatScore.""" def __init__(self, day, night): super(SatScore, self).__init__() self.day = day self.night = night class Satellite(object): """docstring for Satellite.""" def __init__(self, name, day, night, schedule_name=None, international_designator=None): super(Satellite, self).__init__() self.name = name self.international_designator = international_designator self.score = SatScore(day, night) self.schedule_name = schedule_name or name class Scheduler(object): """docstring for Scheduler.""" def __init__(self, stations, min_pass, forward, start, dump_url, patterns, 
center_id, plot_parameters, plot_title): super(Scheduler, self).__init__() self.stations = stations self.min_pass = min_pass self.forward = forward self.start = start self.dump_url = dump_url self.patterns = patterns self.center_id = center_id self.plot_parameters = plot_parameters self.plot_title = plot_title self.opts = None def conflicting_passes(allpasses, delay=timedelta(seconds=0)): """Get the passes in groups of conflicting passes. """ passes = sorted(allpasses, key=lambda x: x.risetime) overpass = passes[0] last_time = overpass.falltime group = [overpass] groups = [] for overpass in passes[1:]: if overpass.risetime - delay < last_time: group.append(overpass) if last_time < overpass.falltime: last_time = overpass.falltime else: groups.append(group) group = [overpass] last_time = overpass.falltime groups.append(group) return groups def get_non_conflicting_groups(passes, delay=timedelta(seconds=0)): """Get the different non-conflicting solutions in a group of conflicting passes. """ # Uses graphs and maximal clique finding with the Bron-Kerbosch algorithm. order = len(passes) if order == 1: return [passes] graph = Graph(order) for i, overpass in enumerate(sorted(passes, key=lambda x: x.risetime)): for j in range(i + 1, order): if not overpass.overlaps(passes[j], delay): graph.add_edge(i, j) groups = [] for res in graph.bron_kerbosch(set(), set(graph.vertices), set()): grp = [] for vertex in res: grp.append(passes[vertex]) groups.append(sorted(grp)) return groups def fermia(t): a = 0.25 b = a / 4 k = b * np.log(1 / 3.0) + a sh = k - 0.25 return 0.5 / (np.exp(((t + sh) - a) / b) + 1) + 0.5 def fermib(t): a = 0.25 b = a / 4 return 1 / (np.exp((t - a) / b) + 1) combination = {} def combine(p1, p2, area_of_interest): """Combine passes together. 
""" try: return combination[p1, p2] except KeyError: pass area = area_of_interest.poly.area() def pscore(poly, coeff=1): if poly is None: return 0 else: return poly.area() * coeff twi1 = get_twilight_poly(p1.uptime) twi2 = get_twilight_poly(p2.uptime) ip1, sip1 = p1.score.get(area_of_interest, (None, None)) if sip1 is None: ip1 = p1.boundary.contour_poly.intersection(area_of_interest.poly) # FIXME: ip1 or ip2 could be None if the pass is entirely inside the # area (or vice versa) if ip1 is None: return 0 ip1d = ip1.intersection(twi1) if ip1d is None: lon, lat = np.rad2deg(ip1.vertices[0, :]) theta = astronomy.cos_zen(p1.uptime, lon, lat) if np.sign(theta) > 0: ip1d = ip1 ip1n = None else: ip1n = ip1 else: twi1.invert() ip1n = ip1.intersection(twi1) twi1.invert() ns1 = pscore(ip1n, p1.satellite.score.night / area) ds1 = pscore(ip1d, p1.satellite.score.day / area) sip1 = ns1 + ds1 p1.score[area_of_interest] = (ip1, sip1) ip2, sip2 = p2.score.get(area_of_interest, (None, None)) if sip2 is None: ip2 = p2.boundary.contour_poly.intersection(area_of_interest.poly) if ip2 is None: return 0 ip2d = ip2.intersection(twi2) if ip2d is None: lon, lat = np.rad2deg(ip2.vertices[0, :]) theta = astronomy.cos_zen(p2.uptime, lon, lat) if np.sign(theta) > 0: ip2d = ip2 ip2n = None else: ip2n = ip2 else: twi2.invert() ip2n = ip2.intersection(twi2) twi2.invert() ns2 = pscore(ip2n, p2.satellite.score.night / area) ds2 = pscore(ip2d, p2.satellite.score.day / area) sip2 = ns2 + ds2 p2.score[area_of_interest] = (ip2, sip2) ip1p2 = ip1.intersection(ip2) if ip1p2 is None: sip1p2 = 0 else: ip1p2da = ip1p2.intersection(twi1) twi1.invert() ip1p2na = ip1p2.intersection(twi1) twi1.invert() ip1p2db = ip1p2.intersection(twi2) twi2.invert() ip1p2nb = ip1p2.intersection(twi2) twi2.invert() ns12a = pscore(ip1p2na, p1.satellite.score.night / area) ds12a = pscore(ip1p2da, p1.satellite.score.day / area) ns12b = pscore(ip1p2nb, p2.satellite.score.night / area) ds12b = pscore(ip1p2db, p2.satellite.score.day 
/ area) sip1p2a = ns12a + ds12a sip1p2b = ns12b + ds12b sip1p2 = (sip1p2a + sip1p2b) / 2.0 if p2 > p1: tdiff = (p2.uptime - p1.uptime).seconds / 3600. else: tdiff = (p1.uptime - p2.uptime).seconds / 3600. res = fermia(tdiff) * (sip1 + sip2) - fermib(tdiff) * sip1p2 combination[p1, p2] = res return res def get_best_sched(overpasses, area_of_interest, delay, avoid_list=None): """Get the best schedule based on *area_of_interest*. """ avoid_list = avoid_list or [] passes = sorted(overpasses, key=lambda x: x.risetime) grs = conflicting_passes(passes, delay) logger.debug("conflicting %s", str(grs)) ncgrs = [get_non_conflicting_groups(gr, delay) for gr in grs] logger.debug("non conflicting %s", str(ncgrs)) n_vertices = len(passes) graph = Graph(n_vertices=n_vertices + 2) def add_arc(graph, p1, p2, hook=None): logger.debug("Adding arc between " + str(p1) + " and " + str(p2) + "...") if p1 in avoid_list or p2 in avoid_list: w = 0 logger.debug("...0 because in the avoid_list!") else: w = combine(p1, p2, area_of_interest) logger.debug("...with weight " + str(w)) # with open("/tmp/schedule.gv", "a") as fp_: # fp_.write(' "' + str(p1) + '" -> "' + str(p2) + # '" [ label = "' + str(w) + '" ];\n') graph.add_arc(passes.index(p1) + 1, passes.index(p2) + 1, w) if hook is not None: hook() prev = set() for ncgr in ncgrs: for pr in prev: foll = set(gr[0] for gr in ncgr) for f in foll: add_arc(graph, pr, f) prev = set(sorted(gr, key=lambda x: x.falltime)[-1] for gr in ncgr) for gr in ncgr: if len(gr) > 1: for p1, p2 in zip(gr[:-1], gr[1:]): add_arc(graph, p1, p2) for pr in prev: graph.add_arc(passes.index(pr) + 1, n_vertices + 1) for first in ncgrs[0][0]: graph.add_arc(0, passes.index(first) + 1) dist, path = graph.dag_longest_path(0, n_vertices + 1) del dist return [passes[idx - 1] for idx in path[1:-1]], (graph, passes) def argmax(iterable): return max((x, i) for i, x in enumerate(iterable))[1] def get_max(groups, fun): """Get the best group of *groups* using the score function *fun* 
""" scores = [] for grp in groups: scores.append(sum([fun(p) for p in grp])) return groups[argmax(scores)] def generate_metno_xml_file(output_file, allpasses, coords, start, end, station_name, center_id, report_mode=False): import xml.etree.ElementTree as ET reqtime = datetime.utcnow() with open(output_file, "w") as out: out.write("") root = ET.Element("acquisition-schedule") props = ET.SubElement(root, "properties") proj = ET.SubElement(props, "project") proj.text = "Pytroll" typep = ET.SubElement(props, "type") if report_mode: typep.text = "report" else: typep.text = "request" station = ET.SubElement(props, "station") station.text = station_name file_start = ET.SubElement(props, "file-start") file_start.text = start.strftime("%Y-%m-%dT%H:%M:%S") file_end = ET.SubElement(props, "file-end") file_end.text = end.strftime("%Y-%m-%dT%H:%M:%S") reqby = ET.SubElement(props, "requested-by") reqby.text = center_id reqon = ET.SubElement(props, "requested-on") reqon.text = reqtime.strftime("%Y-%m-%dT%H:%M:%S") for overpass in sorted(allpasses, key=lambda x: x.risetime): if (overpass.rec or report_mode) and overpass.risetime > start: overpass.generate_metno_xml(coords, root) out.write(ET.tostring(root).decode("utf-8")) out.close() return output_file def generate_meos_file(output_file, allpasses, coords, start, report_mode=False): with open(output_file, "w") as out: out.write(" No. 
Date Satellite Orbit Max EL AOS Ovlp LOS Durtn Az(AOS/MAX)\n") line_no = 1 for overpass in sorted(allpasses, key=lambda x: x.risetime): if (overpass.rec or report_mode) and overpass.risetime > start: out.write(overpass.print_meos(coords, line_no) + "\n") line_no += 1 out.close() return output_file def generate_sch_file(output_file, overpasses, coords): with open(output_file, "w") as out: # create epochs out.write("#Orbital elements\n#\n#SCName Epochtime\n#\n") satellites = set() for overpass in overpasses: epoch = "!{0:<16} {1}".format(overpass.satellite.name.upper(), overpass.orb.tle.epoch.strftime("%Y%m%d %H%M%S")) satellites |= set([epoch]) sats = "\n".join(satellites) + "\n" out.write(sats) out.write("#\n#\n#Pass List\n#\n") out.write( "#SCName RevNum Risetime Falltime Elev Dura ANL Rec Dir Man Ovl OvlSCName " "OvlRev OvlRisetime OrigRisetime OrigFalltime OrigDuration\n#\n") for overpass in sorted(overpasses): out.write(overpass.print_vcs(coords) + "\n") def generate_xml_requests(sched, start, end, station_name, center_id, report_mode=False): """Create xml requests. 
""" import xml.etree.ElementTree as ET reqtime = datetime.utcnow() eum_format = "%Y-%m-%d-%H:%M:%S" root = ET.Element("acquisition-schedule") props = ET.SubElement(root, "properties") proj = ET.SubElement(props, "project") proj.text = "Pytroll" typep = ET.SubElement(props, "type") if report_mode: typep.text = "report" else: typep.text = "request" station = ET.SubElement(props, "station") station.text = station_name file_start = ET.SubElement(props, "file-start") file_start.text = start.strftime(eum_format) file_end = ET.SubElement(props, "file-end") file_end.text = end.strftime(eum_format) reqby = ET.SubElement(props, "requested-by") reqby.text = center_id reqon = ET.SubElement(props, "requested-on") reqon.text = reqtime.strftime(eum_format) for overpass in sorted(sched): if (overpass.rec or report_mode) and overpass.risetime > start: ovpass = ET.SubElement(root, "pass") sat_name = overpass.satellite.schedule_name or overpass.satellite.name ovpass.set("satellite", sat_name) ovpass.set("start-time", overpass.risetime.strftime(eum_format)) ovpass.set("end-time", overpass.falltime.strftime(eum_format)) if report_mode: if overpass.fig is not None: ovpass.set("img", overpass.fig) ovpass.set("rec", str(overpass.rec)) return root, reqtime def generate_xml_file(sched, start, end, xml_file, station, center_id, report_mode=False): """Create an xml request file. 
""" import xml.etree.ElementTree as ET tree, reqtime = generate_xml_requests(sched, start, end, station, center_id, report_mode) filename = xml_file tmp_filename = xml_file + reqtime.strftime("%Y-%m-%d-%H-%M-%S") + ".tmp" with open(tmp_filename, "w") as fp_: if report_mode: fp_.write("" "") fp_.write(ET.tostring(tree).decode("utf-8")) os.rename(tmp_filename, filename) return filename def parse_datetime(strtime): """Parse the time string *strtime* """ return datetime.strptime(strtime, "%Y%m%d%H%M%S") def save_passes(allpasses, poly, output_dir, plot_parameters=None, plot_title=None): """Save overpass plots to png and store in directory *output_dir* """ from trollsched.drawing import save_fig for overpass in allpasses: save_fig(overpass, poly=poly, directory=output_dir, plot_parameters=plot_parameters, plot_title=plot_title) def get_passes_from_xml_file(filename): """Read passes from aquisition xml file.""" import xml.etree.ElementTree as ET tree = ET.parse(filename) root = tree.getroot() pass_list = [] for overpass in root.iter('pass'): start_time = datetime.strptime( overpass.attrib['start-time'], '%Y-%m-%d-%H:%M:%S') end_time = datetime.strptime( overpass.attrib['end-time'], '%Y-%m-%d-%H:%M:%S') pass_list.append(SimplePass( overpass.attrib['satellite'], start_time, end_time)) return pass_list def build_filename(pattern_name, pattern_dict, kwargs): """Build absolute path from pattern dictionary.""" for k in pattern_dict.keys(): for v in pattern_dict.values(): if "{" + k + "}" in v: kwargs[k] = pattern_dict[k].format(**kwargs) return pattern_dict[pattern_name].format(**kwargs) def send_file(url, file): pathname, filename = os.path.split(file) del pathname if url.scheme in ["file", ""]: pass elif url.scheme == "ftp": import ftplib session = ftplib.FTP(url.hostname, url.username, url.password) with open(file, "rb") as xfile: session.storbinary('STOR ' + str(filename), xfile) session.quit() else: logger.error("Cannot save to %s, but file is there:", str(url.scheme), 
str(file)) def combined_stations(scheduler, start_time, graph, allpasses): # opts, pattern, station_list, graph, allpasses, start_time, start, forward, center_id): """The works around the combination of schedules for two or more stations.""" logger.info("Generating coordinated schedules ...") def collect_labels(newpasses, stats): """Collect labels, each with one pass per station.""" # TODO: is there a simpler way? clabels = [] npasses = {s: set() for s in stats} for npass in newpasses: cl = [] for i, s in zip(range(len(stats)), stats): if npass[i][0] is None: cl.append("---") else: npasses[s].add(npass[i][0]) if npass[i][0].rec: cl.append("+ " + str(npass[i][0])) else: cl.append(" " + str(npass[i][0])) clabels.append("\\n".join(cl)) return clabels pattern_args = { "output_dir": scheduler.opts.output_dir, "date": start_time.strftime("%Y%m%d"), "time": start_time.strftime("%H%M%S") } if scheduler.opts.xml: pattern_args['mode'] = "request" elif scheduler.opts.report: pattern_args['mode'] = "report" passes = {} # reset flag "rec" for all passes. try: for s, ap in allpasses.items(): passes[s] = list(ap) for p in passes[s]: p.rec = False except Exception: logger.exception("Failed to reset 'rec' for s:%s ap:%s passes[s]:%s p:%s", s, ap, passes[s], p) raise stats, schedule, (newgraph, newpasses) = get_combined_sched(graph, passes) # logger.debug(pformat(schedule)) for opass in schedule: for i, ipass in zip(range(len(opass)), opass): if ipass[0] is None: continue ipass[0].rec = True logger.info("generating files") if scheduler.opts.graph: # save graph as npz file. pattern_args["station"] = "comb" newgraph.save(build_filename("file_graph", scheduler.patterns, pattern_args)) # Collect labels, each with one pass per station. 
clabels = collect_labels(newpasses, stats) # save graph as gv file for "dot"-plot newgraph.export(labels=[str(label) for label in clabels], filename=build_filename("file_graph", scheduler.patterns, pattern_args) + ".gv") for station_id in passes.keys(): pattern_args["station"] = station_id + "-comb" logger.info("Create schedule file(s) for %s", station_id) if scheduler.opts.scisys: generate_sch_file(build_filename("file_sci", scheduler.patterns, pattern_args), passes[station_id], [s.coords for s in scheduler.stations if s.id == station_id][0]) if scheduler.opts.xml or scheduler.opts.report: pattern_args['mode'] = "request" xmlfile = generate_xml_file(passes[station_id], start_time + timedelta(hours=scheduler.start), start_time + timedelta(hours=scheduler.forward), build_filename( "file_xml", scheduler.patterns, pattern_args), station_id, scheduler.center_id, False) logger.info("Generated " + str(xmlfile)) url = urlparse(scheduler.opts.output_url or scheduler.opts.output_dir) send_file(url, xmlfile) if scheduler.opts.report: pattern_args['mode'] = "report" xmlfile = generate_xml_file(passes[station_id], start_time + timedelta(hours=scheduler.start), start_time + timedelta(hours=scheduler.forward), build_filename( "file_xml", scheduler.patterns, pattern_args), # scheduler.stations[station_id].name, station_id, scheduler.center_id, True) logger.info("Generated " + str(xmlfile)) if scheduler.opts.meos: meosfile = generate_meos_file(build_filename("file_meos", scheduler.patterns, pattern_args), passes[station_id], # station_meta[station]['coords'], [s.coords for s in scheduler.stations if s.id == station_id][0], start_time + timedelta(hours=scheduler.start), False) # Ie only print schedule passes logger.info("Generated " + str(meosfile)) if scheduler.opts.metno_xml: metno_xmlfile = generate_metno_xml_file(build_filename("file_metno_xml", scheduler.patterns, pattern_args), passes[station_id], # station_meta[station]['coords'], [s.coords for s in scheduler.stations if 
s.id == station_id][0], start_time + timedelta(hours=scheduler.start), start_time + timedelta(hours=scheduler.forward), station_id, scheduler.center_id, False) logger.info("Generated " + str(metno_xmlfile)) logger.info("Finished coordinated schedules.") def run(): """The schedule command.""" import argparse global logger parser = argparse.ArgumentParser() # general arguments parser.add_argument("-c", "--config", default=None, help="configuration file to use") parser.add_argument("-t", "--tle", default=None, help="tle file to use") parser.add_argument("-l", "--log", default=None, help="File to log to (defaults to stdout)") parser.add_argument("-m", "--mail", nargs="*", default=None, help="mail address(es) to send error messages to.") parser.add_argument("-v", "--verbose", action="store_true", help="print debug messages too") # argument group: coordinates and times group_postim = parser.add_argument_group(title="start-parameter", description="(or set values in the configuration file)") group_postim.add_argument("--lat", type=float, help="Latitude, degrees north") group_postim.add_argument("--lon", type=float, help="Longitude, degrees east") group_postim.add_argument("--alt", type=float, help="Altitude, km") group_postim.add_argument("-f", "--forward", type=float, help="time ahead to compute the schedule") group_postim.add_argument("-s", "--start-time", type=parse_datetime, help="start time of the schedule to compute") group_postim.add_argument("-d", "--delay", default=60, type=float, help="delay (in seconds) needed between two " + "consecutive passes (60 seconds by default)") # argument group: special behaviour group_spec = parser.add_argument_group(title="special", description="(additional parameter changing behaviour)") group_spec.add_argument("-a", "--avoid", help="xml request file with passes to avoid") group_spec.add_argument("--no-aqua-terra-dump", action="store_false", help="do not consider Aqua/Terra-dumps") group_spec.add_argument("--multiproc", 
action="store_true", help="use multiple parallel processes") # argument group: output-related group_outp = parser.add_argument_group(title="output", description="(file pattern are taken from configuration file)") group_outp.add_argument("-o", "--output-dir", default=None, help="where to put generated files") group_outp.add_argument("-u", "--output-url", default=None, help="URL where to put generated schedule file(s)" + ", otherwise use output-dir") group_outp.add_argument("-x", "--xml", action="store_true", help="generate an xml request file (schedule)" ) group_outp.add_argument("-r", "--report", action="store_true", help="generate an xml report file (schedule)") group_outp.add_argument("--scisys", action="store_true", help="generate a SCISYS schedule file") group_outp.add_argument("-p", "--plot", action="store_true", help="generate plot images") group_outp.add_argument("-g", "--graph", action="store_true", help="save graph info") group_outp.add_argument("--meos", action="store_true", help="generate a MEOS schedule file") group_outp.add_argument("--metno-xml", action="store_true", help="generate a METNO xml pass data file") opts = parser.parse_args() if opts.config: # read_config() returns: # [(coords, station, area, scores)], forward, start, {pattern} # station_list, forward, start, pattern = utils.read_config(opts.config) scheduler = utils.read_config(opts.config) # TODO make config file compulsory if (not opts.config) and (not (opts.lon or opts.lat or opts.alt)): parser.error("Coordinates must be provided in the absence of " "configuration file.") if not (opts.xml or opts.scisys or opts.report or opts.metno_xml): parser.error("No output specified, use '--scisys' or '-x/--xml'") if opts.output_dir is None: opts.output_dir = os.path.curdir if "dir_output" not in scheduler.patterns: pattern["dir_output"] = opts.output_dir if opts.log: previous = os.path.exists(opts.log) handler = logging.handlers.RotatingFileHandler(opts.log, backupCount=7) if previous: 
handler.doRollover() else: handler = logging.StreamHandler() handler.setFormatter(logging.Formatter("[%(levelname)s: %(asctime)s :" " %(name)s] %(message)s", '%Y-%m-%d %H:%M:%S')) if opts.verbose: loglevel = logging.DEBUG else: loglevel = logging.INFO handler.setLevel(loglevel) logging.getLogger('').setLevel(loglevel) logging.getLogger('').addHandler(handler) if opts.mail: mhandler = logging.handlers.SMTPHandler("localhost", "pytroll-schedule@pytroll.org", opts.mail, "Scheduler") mhandler.setLevel(logging.WARNING) logging.getLogger('').addHandler(mhandler) logger = logging.getLogger("trollsched") tle_file = opts.tle if opts.start_time: start_time = opts.start_time else: start_time = datetime.utcnow() allpasses = {} graph = {} logger.debug("start: %s forward: %s" % (scheduler.start, scheduler.forward)) pattern_args = { "output_dir": opts.output_dir, "date": start_time.strftime("%Y%m%d"), "time": start_time.strftime("%H%M%S") } dir_output = build_filename("dir_output", scheduler.patterns, pattern_args) if not os.path.exists(dir_output): logger.debug("Create output dir " + dir_output) os.makedirs(dir_output) if len(scheduler.stations) > 1: opts.comb = True import pickle ph = open(os.path.join(dir_output, "opts.pkl"), "wb") pickle.dump(opts, ph) ph.close() else: opts.comb = False scheduler.opts = opts # single- or multi-processing? if not opts.multiproc or len(scheduler.stations) == 1: # sequential processing all stations' single schedule. for station in scheduler.stations: graph[station.id], allpasses[station.id] = station.single_station(scheduler, start_time, tle_file) else: # processing the stations' single schedules with multiprocessing. process_single = {} statlst_ordered = [] # first round through the stations, forking sub-processes to do the # "single station calculations" in parallel. # the pickling of passes and graphs is done inside single_station(). 
for station in scheduler.stations: statlst_ordered.append(station.id) from multiprocessing import Process process_single[station.id] = Process( target=station.single_station, args=(scheduler, start_time, tle_file)) process_single[station.id].start() # second round through the stations, collecting the sub-processes and # their results. for station_id in statlst_ordered: process_single[station_id].join() pattern_args["station"] = station_id # load graph for station graph[station_id] = Graph() graph[station_id].load(build_filename( "file_graph", scheduler.patterns, pattern_args) + ".npz") # load pickled passes for station ph = open(os.path.join( dir_output, "allpasses.%s.pkl" % station_id), "rb") allpasses[station_id] = pickle.load(ph) ph.close() if opts.comb: combined_stations(scheduler, start_time, graph, allpasses) if __name__ == '__main__': try: run() except Exception: logger.exception("Something wrong happened!") raise pytroll-schedule-0.6.0/trollsched/spherical.py000066400000000000000000000254571415440646100215250ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2013, 2014, 2015, 2018 Martin Raspaud # Author(s): # Martin Raspaud # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # You should have received a copy of the GNU General Public License # along with this program. If not, see . """Some generalized spherical functions. 
base type is a numpy array of size (n, 2) (2 for lon and lats) """ import numpy as np import pyresample.spherical import logging logger = logging.getLogger(__name__) class SCoordinate(object): """Spherical coordinates """ def __init__(self, lon, lat): self.lon = lon self.lat = lat def cross2cart(self, point): """Compute the cross product, and convert to cartesian coordinates """ lat1 = self.lat lon1 = self.lon lat2 = point.lat lon2 = point.lon ad = np.sin(lat1 - lat2) * np.cos((lon1 - lon2) / 2.0) be = np.sin(lat1 + lat2) * np.sin((lon1 - lon2) / 2.0) c = np.sin((lon1 + lon2) / 2.0) f = np.cos((lon1 + lon2) / 2.0) g = np.cos(lat1) h = np.cos(lat2) i = np.sin(lon2 - lon1) res = CCoordinate(np.array([-ad * c + be * f, ad * f + be * c, g * h * i])) return res def to_cart(self): """Convert to cartesian. """ return CCoordinate(np.array([np.cos(self.lat) * np.cos(self.lon), np.cos(self.lat) * np.sin(self.lon), np.sin(self.lat)])) def distance(self, point): """Vincenty formula. """ dlambda = self.lon - point.lon num = ((np.cos(point.lat) * np.sin(dlambda)) ** 2 + (np.cos(self.lat) * np.sin(point.lat) - np.sin(self.lat) * np.cos(point.lat) * np.cos(dlambda)) ** 2) den = (np.sin(self.lat) * np.sin(point.lat) + np.cos(self.lat) * np.cos(point.lat) * np.cos(dlambda)) return np.arctan2(num ** .5, den) def hdistance(self, point): """Haversine formula """ return 2 * np.arcsin((np.sin((point.lat - self.lat) / 2.0) ** 2.0 + np.cos(point.lat) * np.cos(self.lat) * np.sin((point.lon - self.lon) / 2.0) ** 2.0) ** .5) def __ne__(self, other): return not self.__eq__(other) def __eq__(self, other): return np.allclose((self.lon, self.lat), (other.lon, other.lat)) def __str__(self): return str((np.rad2deg(self.lon), np.rad2deg(self.lat))) def __repr__(self): return str((np.rad2deg(self.lon), np.rad2deg(self.lat))) def __iter__(self): return [self.lon, self.lat].__iter__() class CCoordinate(object): """Cartesian coordinates """ def __init__(self, cart): self.cart = np.array(cart) def 
norm(self): """Euclidean norm of the vector. """ return np.sqrt(np.einsum('...i, ...i', self.cart, self.cart)) def normalize(self): """normalize the vector. """ self.cart /= np.sqrt(np.einsum('...i, ...i', self.cart, self.cart)) return self def cross(self, point): """cross product with another vector. """ return CCoordinate(np.cross(self.cart, point.cart)) def dot(self, point): """dot product with another vector. """ return np.inner(self.cart, point.cart) def __ne__(self, other): return not self.__eq__(other) def __eq__(self, other): return np.allclose(self.cart, other.cart) def __str__(self): return str(self.cart) def __repr__(self): return str(self.cart) def __add__(self, other): try: return CCoordinate(self.cart + other.cart) except AttributeError: return CCoordinate(self.cart + np.array(other)) def __radd__(self, other): return self.__add__(other) def __mul__(self, other): try: return CCoordinate(self.cart * other.cart) except AttributeError: return CCoordinate(self.cart * np.array(other)) def __rmul__(self, other): return self.__mul__(other) def to_spherical(self): return SCoordinate(np.arctan2(self.cart[1], self.cart[0]), np.arcsin(self.cart[2])) EPSILON = 0.0000001 def modpi(val, mod=np.pi): """Puts *val* between -*mod* and *mod*. """ return (val + mod) % (2 * mod) - mod class Arc(object): """An arc of the great circle between two points. """ start = None end = None def __init__(self, start, end): self.start, self.end = start, end def __eq__(self, other): if(self.start == other.start and self.end == other.end): return 1 return 0 def __ne__(self, other): return not self.__eq__(other) def __str__(self): return (str(self.start) + " -> " + str(self.end)) def __repr__(self): return (str(self.start) + " -> " + str(self.end)) def angle(self, other_arc): """Oriented angle between two arcs. 
""" if self.start == other_arc.start: a__ = self.start b__ = self.end c__ = other_arc.end elif self.start == other_arc.end: a__ = self.start b__ = self.end c__ = other_arc.start elif self.end == other_arc.end: a__ = self.end b__ = self.start c__ = other_arc.start elif self.end == other_arc.start: a__ = self.end b__ = self.start c__ = other_arc.end else: raise ValueError("No common point in angle computation.") ua_ = a__.cross2cart(b__) ub_ = a__.cross2cart(c__) val = ua_.dot(ub_) / (ua_.norm() * ub_.norm()) if abs(val - 1) < EPSILON: angle = 0 elif abs(val + 1) < EPSILON: angle = np.pi else: angle = np.arccos(val) n__ = ua_.normalize() if n__.dot(c__.to_cart()) > 0: return -angle else: return angle def intersections(self, other_arc): """Gives the two intersections of the greats circles defined by the current arc and *other_arc*. From http://williams.best.vwh.net/intersect.htm """ if self.end.lon - self.start.lon > np.pi: self.end.lon -= 2 * np.pi if other_arc.end.lon - other_arc.start.lon > np.pi: other_arc.end.lon -= 2 * np.pi if self.end.lon - self.start.lon < -np.pi: self.end.lon += 2 * np.pi if other_arc.end.lon - other_arc.start.lon < -np.pi: other_arc.end.lon += 2 * np.pi ea_ = self.start.cross2cart(self.end).normalize() eb_ = other_arc.start.cross2cart(other_arc.end).normalize() cross = ea_.cross(eb_) lat = np.arctan2(cross.cart[2], np.sqrt(cross.cart[0] ** 2 + cross.cart[1] ** 2)) lon = np.arctan2(cross.cart[1], cross.cart[0]) return (SCoordinate(lon, lat), SCoordinate(modpi(lon + np.pi), -lat)) def intersects(self, other_arc): """Says if two arcs defined by the current arc and the *other_arc* intersect. An arc is defined as the shortest tracks between two points. """ return bool(self.intersection(other_arc)) def intersection(self, other_arc): """Says where, if two arcs defined by the current arc and the *other_arc* intersect. An arc is defined as the shortest tracks between two points. 
""" if self == other_arc: return None # if (self.end == other_arc.start or # self.end == other_arc.end or # self.start == other_arc.start or # self.start == other_arc.end): # return None for i in self.intersections(other_arc): a__ = self.start b__ = self.end c__ = other_arc.start d__ = other_arc.end ab_ = a__.hdistance(b__) cd_ = c__.hdistance(d__) if(((i in (a__, b__)) or (abs(a__.hdistance(i) + b__.hdistance(i) - ab_) < EPSILON)) and ((i in (c__, d__)) or (abs(c__.hdistance(i) + d__.hdistance(i) - cd_) < EPSILON))): return i return None def get_next_intersection(self, arcs, known_inter=None): """Get the next intersection between the current arc and *arcs* """ res = [] for arc in arcs: inter = self.intersection(arc) if (inter is not None and inter != arc.end and inter != self.end): res.append((inter, arc)) def dist(args): """distance key. """ return self.start.distance(args[0]) take_next = False for inter, arc in sorted(res, key=dist): if known_inter is not None: if known_inter == inter: take_next = True elif take_next: return inter, arc else: return inter, arc return None, None class SphPolygon(pyresample.spherical.SphPolygon): def draw(self, mapper, options, **more_options): lons = np.rad2deg(self.lon.take(np.arange(len(self.lon) + 1), mode="wrap")) lats = np.rad2deg(self.lat.take(np.arange(len(self.lat) + 1), mode="wrap")) rx, ry = mapper(lons, lats) mapper.plot(rx, ry, options, **more_options) def get_twilight_poly(utctime): """Return a polygon enclosing the sunlit part of the globe at *utctime*. 
""" from pyorbital import astronomy ra, dec = astronomy.sun_ra_dec(utctime) lon = modpi(ra - astronomy.gmst(utctime)) lat = dec vertices = np.zeros((4, 2)) vertices[0, :] = modpi(lon - np.pi / 2), 0 if lat <= 0: vertices[1, :] = lon, np.pi / 2 + lat vertices[3, :] = modpi(lon + np.pi), -(np.pi / 2 + lat) else: vertices[1, :] = modpi(lon + np.pi), np.pi / 2 - lat vertices[3, :] = lon, -(np.pi / 2 - lat) vertices[2, :] = modpi(lon + np.pi / 2), 0 return SphPolygon(vertices) pytroll-schedule-0.6.0/trollsched/tests/000077500000000000000000000000001415440646100203265ustar00rootroot00000000000000pytroll-schedule-0.6.0/trollsched/tests/__init__.py000066400000000000000000000022431415440646100224400ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2014 - 2018 PyTroll Community # Author(s): # Martin Raspaud # Adam Dybbroe # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # You should have received a copy of the GNU General Public License # along with this program. If not, see . """Tests for scheduler. """ from trollsched.tests import (test_schedule, test_spherical, test_satpass) import unittest def suite(): """The global test suite. 
""" mysuite = unittest.TestSuite() mysuite.addTests(test_schedule.suite()) mysuite.addTests(test_spherical.suite()) mysuite.addTests(test_satpass.suite()) return mysuite pytroll-schedule-0.6.0/trollsched/tests/test_satpass.py000066400000000000000000000466571415440646100234370ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2018 - 2021 Pytroll-schedule developers # Author(s): # Adam.Dybbroe # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # You should have received a copy of the GNU General Public License # along with this program. If not, see . 
"""Test the satellite pass and swath boundary classes """ import unittest import numpy as np from datetime import datetime, timedelta from trollsched.satpass import Pass from trollsched.boundary import SwathBoundary from pyorbital.orbital import Orbital from pyresample.geometry import AreaDefinition, create_area_def LONS1 = np.array([-122.29913729160562, -131.54385362589042, -155.788034272281, 143.1730880418349, 105.69172088208997, 93.03135571771092, 87.26010432019743, 83.98598584966442, 81.86683434871546, 80.37175346216411, 79.2509798668123, 78.37198926578984, 77.65800714027662, 77.06147400915819, 76.55132566889495, 76.10637628220547, 75.71164306799828, 75.35619180525052, 75.03181505238287, 74.73218847041143, 74.45231256197947, 74.18813012461848, 73.9362540393912, 73.69376447765231, 73.45804804675883, 73.22665809422263, 72.99717692793544, 72.76705638792168, 72.53339841609603, 72.2925978414254, 72.03965795937306, 71.76661774368146, 71.45957469190316, 57.97687872167697, 45.49802548616658, 34.788857347919546, 25.993525469714424, 18.88846123000295, 13.14317179269443, 8.450362684274728, -0.27010733525252295, -3.0648326302431794, -5.116189000358824, -6.73429807721795, -8.072680053386163, -9.21696007364773, -10.220171884036919, -11.11762132513045, -11.934120125548072, -12.687881125682765, -13.392781001351315, -14.059756511026736, -14.69771138916782, -15.314133712696703, -15.915536409615735, -16.507788289068856, -17.09637839792269, -17.686643087306685, -18.283978247944123, -18.894056410060063, -19.523069195727878, -20.17801994245519, -20.867100607022966, -21.600204055760642, -22.389653641849733, -23.251288693929943, -24.206153922914886, -25.283264445138713, -26.524411381004743, -27.993172418988525, -29.79361072725673, -32.11515837055801, -35.36860848223405, -35.38196057933595, -35.96564490844792, -37.14469461070555, -39.34032289002443, -43.49756191648018, -52.140150361811244, -73.32968630186114], dtype='float64') LATS1 = np.array([84.60636067724808, 86.98555849233523, 
88.49911967556697, 88.90233393880413, 88.23555365613707, 87.41630911481282, 86.64939216187459, 85.94959841469182, 85.30839167814023, 84.71507625588431, 84.16010931725756, 83.63544438659248, 83.13431099825148, 82.65092034888734, 82.18020003036649, 81.71757084925224, 81.25875743723827, 80.79962022032255, 80.33599602524967, 79.86353436733512, 79.37751495806062, 78.87262831355378, 78.3426942980262, 77.78028071690198, 77.17616119674511, 76.51850934329316, 75.79164459606967, 74.97397797613992, 74.03443588562436, 72.92573674313518, 71.57038280824118, 69.82683886377178, 67.40109717220513, 67.03242839212335, 65.54326755696877, 63.11784822611803, 59.98023069591168, 56.32647323215378, 52.30373268534935, 48.01531077177335, 36.33799056582854, 37.200362356448125, 37.78169598891329, 38.210308430109684, 38.54535234179983, 38.8181101172057, 39.0470359762339, 39.24386487280032, 39.41648482997921, 39.57043267820405, 39.70973443234515, 39.83740623634436, 39.955767569171485, 40.06664498984812, 40.17150923539549, 40.271570238680745, 40.36784473887322, 40.46120553672548, 40.55241811035527, 40.64216822927882, 40.7310828091462, 40.819745180454284, 40.90870492549053, 40.99848114410508, 41.08955592221846, 41.18235086149538, 41.27717142920562, 41.37408580927609, 41.472661254399455, 41.57136466452366, 41.66608254408796, 41.745942562974314, 41.77850750277849, 54.62516158367828, 59.69624962433962, 64.7365168572082, 69.72588498397877, 74.61859631181376, 79.2863412851444, 83.25136141880888], dtype='float64') LONS2 = np.array([-174.41109502, 167.84584132, 148.24213696, 130.10334782, 115.7074828, 105.07369809, 97.28481583, 91.4618503, 86.98024241, 83.4283141, 80.53652225, 78.1253594, 76.07228855, 74.29143113, 72.72103408, 71.31559576, 70.04080412, 68.87020177, 67.78293355, 66.76218577, 65.79407472, 64.86682945, 63.97016605, 63.09478077, 62.23190558, 61.37287373, 60.50863405, 59.62912286, 58.72232744, 57.77268809, 56.75796498, 55.6419694, 54.36007027, 41.41762911, 41.15660793, 40.9331126, 
40.73252665, 40.54677784, 40.37092304, 40.20150965, 40.0358693, 39.87175642, 39.70713409, 39.54002703, 39.36840323, 39.1900621, 39.00251256, 38.80282499, 38.58743647, 38.35188019, 38.09039231, 37.79531831, 37.45618154, 37.05815986, 36.57947382, 35.98665163, 35.22533847, 34.20085643, 32.73220377, 30.42514135, 26.23397747, 16.29417395, -23.91719576, -102.71481425, -122.5294795, -129.09284487], dtype='float64') LATS2 = np.array([83.23214786, 84.90973645, 85.62529048, 85.74243351, 85.52147568, 85.13874302, 84.69067959, 84.22338069, 83.75720094, 83.30023412, 82.85480916, 82.42053485, 81.9957309, 81.57810129, 81.16504231, 80.75376801, 80.34133891, 79.92463458, 79.50028749, 79.0645828, 78.61332046, 78.14162813, 77.64370408, 77.11245516, 76.5389713, 75.91173559, 75.21538754, 74.42869094, 73.52099029, 72.44554294, 71.12561977, 69.42093758, 67.03973793, 67.40770791, 69.8341456, 71.57844446, 72.93459921, 74.04414258, 74.98457279, 75.80317362, 76.53102217, 77.1897121, 77.79492994, 78.3585095, 78.88968633, 79.39590402, 79.88335693, 80.35737249, 80.8226939, 81.28370137, 81.74459732, 82.20957417, 82.68298027, 83.16949849, 83.67435372, 84.20356848, 84.76429067, 85.36521771, 86.01711637, 86.73327122, 87.5286869, 88.40887156, 89.21959299, 88.71884272, 87.09172665, 84.6670132], dtype='float64') LONS3 = np.array([-8.66259458, -6.20984986, 15.99813586, 25.41134052, 33.80598414, 48.28641356, 49.55596283, 45.21769275, 43.95449327, 30.04053601, 22.33028017, 13.90584249, -5.59290326, -7.75625031], dtype='float64') LATS3 = np.array([66.94713585, 67.07854554, 66.53108388, 65.27837805, 63.50223596, 58.33858588, 57.71210872, 55.14964148, 55.72506407, 60.40889798, 61.99561474, 63.11425455, 63.67173255, 63.56939058], dtype='float64') AREA_DEF_EURON1 = AreaDefinition('euron1', 'Northern Europe - 1km', '', {'proj': 'stere', 'ellps': 'WGS84', 'lat_0': 90.0, 'lon_0': 0.0, 'lat_ts': 60.0}, 3072, 3072, (-1000000.0, -4500000.0, 2072000.0, -1428000.0)) def assertNumpyArraysEqual(self, other): if 
self.shape != other.shape: raise AssertionError("Shapes don't match") if not np.allclose(self, other): raise AssertionError("Elements don't match!") def get_n20_orbital(): """Return the orbital instance for a given set of TLEs for NOAA-20. From 16 October 2018. """ tle1 = "1 43013U 17073A 18288.00000000 .00000042 00000-0 20142-4 0 2763" tle2 = "2 43013 098.7338 224.5862 0000752 108.7915 035.0971 14.19549169046919" return Orbital('NOAA-20', line1=tle1, line2=tle2) def get_n19_orbital(): """Return the orbital instance for a given set of TLEs for NOAA-19. From 16 October 2018. """ tle1 = "1 33591U 09005A 18288.64852564 .00000055 00000-0 55330-4 0 9992" tle2 = "2 33591 99.1559 269.1434 0013899 353.0306 7.0669 14.12312703499172" return Orbital('NOAA-19', line1=tle1, line2=tle2) def get_mb_orbital(): """Return orbital for a given set of TLEs for MetOp-B. From 2021-02-04 """ tle1 = "1 38771U 12049A 21034.58230818 -.00000012 00000-0 14602-4 0 9998" tle2 = "2 38771 98.6992 96.5537 0002329 71.3979 35.1836 14.21496632434867" return Orbital("Metop-B", line1=tle1, line2=tle2) class TestPass(unittest.TestCase): def setUp(self): """Set up""" self.n20orb = get_n20_orbital() self.n19orb = get_n19_orbital() def test_pass_instrument_interface(self): tstart = datetime(2018, 10, 16, 2, 48, 29) tend = datetime(2018, 10, 16, 3, 2, 38) instruments = set(('viirs', 'avhrr', 'modis', 'mersi', 'mersi-2')) for instrument in instruments: overp = Pass('NOAA-20', tstart, tend, orb=self.n20orb, instrument=instrument) self.assertEqual(overp.instrument, instrument) instruments = set(('viirs', 'avhrr', 'modis')) overp = Pass('NOAA-20', tstart, tend, orb=self.n20orb, instrument=instruments) self.assertEqual(overp.instrument, 'avhrr') instruments = set(('viirs', 'modis')) overp = Pass('NOAA-20', tstart, tend, orb=self.n20orb, instrument=instruments) self.assertEqual(overp.instrument, 'viirs') instruments = set(('amsu-a', 'mhs')) self.assertRaises(TypeError, Pass, self, 'NOAA-20', tstart, tend, 
orb=self.n20orb, instrument=instruments) def tearDown(self): """Clean up""" pass class TestSwathBoundary(unittest.TestCase): def setUp(self): """Set up""" self.n20orb = get_n20_orbital() self.n19orb = get_n19_orbital() self.mborb = get_mb_orbital() self.euron1 = AREA_DEF_EURON1 self.antarctica = create_area_def( "antarctic", {'ellps': 'WGS84', 'lat_0': '-90', 'lat_ts': '-60', 'lon_0': '0', 'no_defs': 'None', 'proj': 'stere', 'type': 'crs', 'units': 'm', 'x_0': '0', 'y_0': '0'}, width=1000, height=1000, area_extent=(-4008875.4031, -4000855.294, 4000855.9937, 4008874.7048)) self.arctica = create_area_def( "arctic", {'ellps': 'WGS84', 'lat_0': '90', 'lat_ts': '60', 'lon_0': '0', 'no_defs': 'None', 'proj': 'stere', 'type': 'crs', 'units': 'm', 'x_0': '0', 'y_0': '0'}, width=1000, height=1000, area_extent=(-4008875.4031, -4000855.294, 4000855.9937, 4008874.7048)) def test_swath_boundary(self): tstart = datetime(2018, 10, 16, 2, 48, 29) tend = datetime(2018, 10, 16, 3, 2, 38) overp = Pass('NOAA-20', tstart, tend, orb=self.n20orb, instrument='viirs') overp_boundary = SwathBoundary(overp) cont = overp_boundary.contour() assertNumpyArraysEqual(cont[0], LONS1) assertNumpyArraysEqual(cont[1], LATS1) tstart = datetime(2018, 10, 16, 4, 29, 4) tend = datetime(2018, 10, 16, 4, 30, 29, 400000) overp = Pass('NOAA-20', tstart, tend, orb=self.n20orb, instrument='viirs') overp_boundary = SwathBoundary(overp, frequency=200) cont = overp_boundary.contour() assertNumpyArraysEqual(cont[0], LONS2) assertNumpyArraysEqual(cont[1], LATS2) # NOAA-19 AVHRR: tstart = datetime.strptime('20181016 04:00:00', '%Y%m%d %H:%M:%S') tend = datetime.strptime('20181016 04:01:00', '%Y%m%d %H:%M:%S') overp = Pass('NOAA-19', tstart, tend, orb=self.n19orb, instrument='avhrr') overp_boundary = SwathBoundary(overp, frequency=500) cont = overp_boundary.contour() assertNumpyArraysEqual(cont[0], LONS3) assertNumpyArraysEqual(cont[1], LATS3) overp = Pass('NOAA-19', tstart, tend, orb=self.n19orb, 
instrument='avhrr/3') overp_boundary = SwathBoundary(overp, frequency=500) cont = overp_boundary.contour() assertNumpyArraysEqual(cont[0], LONS3) assertNumpyArraysEqual(cont[1], LATS3) overp = Pass('NOAA-19', tstart, tend, orb=self.n19orb, instrument='avhrr-3') overp_boundary = SwathBoundary(overp, frequency=500) cont = overp_boundary.contour() assertNumpyArraysEqual(cont[0], LONS3) assertNumpyArraysEqual(cont[1], LATS3) def test_swath_coverage(self): # NOAA-19 AVHRR: tstart = datetime.strptime('20181016 03:54:13', '%Y%m%d %H:%M:%S') tend = datetime.strptime('20181016 03:55:13', '%Y%m%d %H:%M:%S') overp = Pass('NOAA-19', tstart, tend, orb=self.n19orb, instrument='avhrr') cov = overp.area_coverage(self.euron1) self.assertEqual(cov, 0) overp = Pass('NOAA-19', tstart, tend, orb=self.n19orb, instrument='avhrr', frequency=80) cov = overp.area_coverage(self.euron1) self.assertEqual(cov, 0) tstart = datetime.strptime('20181016 04:00:00', '%Y%m%d %H:%M:%S') tend = datetime.strptime('20181016 04:01:00', '%Y%m%d %H:%M:%S') overp = Pass('NOAA-19', tstart, tend, orb=self.n19orb, instrument='avhrr') cov = overp.area_coverage(self.euron1) self.assertAlmostEqual(cov, 0.103526, 5) overp = Pass('NOAA-19', tstart, tend, orb=self.n19orb, instrument='avhrr', frequency=100) cov = overp.area_coverage(self.euron1) self.assertAlmostEqual(cov, 0.103526, 5) overp = Pass('NOAA-19', tstart, tend, orb=self.n19orb, instrument='avhrr/3', frequency=133) cov = overp.area_coverage(self.euron1) self.assertAlmostEqual(cov, 0.103526, 5) overp = Pass('NOAA-19', tstart, tend, orb=self.n19orb, instrument='avhrr', frequency=300) cov = overp.area_coverage(self.euron1) self.assertAlmostEqual(cov, 0.103526, 5) # ASCAT and AVHRR on Metop-B: tstart = datetime.strptime("2019-01-02T10:19:39", "%Y-%m-%dT%H:%M:%S") tend = tstart + timedelta(seconds=180) tle1 = '1 38771U 12049A 19002.35527803 .00000000 00000+0 21253-4 0 00017' tle2 = '2 38771 98.7284 63.8171 0002025 96.0390 346.4075 14.21477776326431' mypass = 
Pass('Metop-B', tstart, tend, instrument='ascat', tle1=tle1, tle2=tle2) cov = mypass.area_coverage(self.euron1) self.assertAlmostEqual(cov, 0.322812, 5) mypass = Pass('Metop-B', tstart, tend, instrument='avhrr', tle1=tle1, tle2=tle2) cov = mypass.area_coverage(self.euron1) self.assertAlmostEqual(cov, 0.357324, 5) tstart = datetime.strptime("2019-01-05T01:01:45", "%Y-%m-%dT%H:%M:%S") tend = tstart + timedelta(seconds=60*15.5) tle1 = '1 43010U 17072A 18363.54078832 -.00000045 00000-0 -79715-6 0 9999' tle2 = '2 43010 98.6971 300.6571 0001567 143.5989 216.5282 14.19710974 58158' mypass = Pass('FENGYUN 3D', tstart, tend, instrument='mersi2', tle1=tle1, tle2=tle2) cov = mypass.area_coverage(self.euron1) self.assertAlmostEqual(cov, 0.786836, 5) mypass = Pass('FENGYUN 3D', tstart, tend, instrument='mersi-2', tle1=tle1, tle2=tle2) cov = mypass.area_coverage(self.euron1) self.assertAlmostEqual(cov, 0.786836, 5) def test_arctic_is_not_antarctic(self): tstart = datetime(2021, 2, 3, 16, 28, 3) tend = datetime(2021, 2, 3, 16, 31, 3) overp = Pass('Metop-B', tstart, tend, orb=self.mborb, instrument='avhrr') cov_south = overp.area_coverage(self.antarctica) cov_north = overp.area_coverage(self.arctica) assert cov_north == 0 assert cov_south != 0 def tearDown(self): """Clean up""" pass class TestPassList(unittest.TestCase): def setUp(self): """Set up""" pass def test_meos_pass_list(self): orig = (" 1 20190105 FENGYUN 3D 5907 52.943 01:01:45 n/a 01:17:15 15:30 18.6 107.4 -- " "Undefined(Scheduling not done 1546650105 ) a3d0df0cd289244e2f39f613f229a5cc D") tstart = datetime.strptime("2019-01-05T01:01:45", "%Y-%m-%dT%H:%M:%S") tend = tstart + timedelta(seconds=60 * 15.5) tle1 = '1 43010U 17072A 18363.54078832 -.00000045 00000-0 -79715-6 0 9999' tle2 = '2 43010 98.6971 300.6571 0001567 143.5989 216.5282 14.19710974 58158' mypass = Pass('FENGYUN 3D', tstart, tend, instrument='mersi2', tle1=tle1, tle2=tle2) coords = (10.72, 59.942, 0.1) meos_format_str = mypass.print_meos(coords, 
line_no=1) self.assertEqual(meos_format_str, orig) mypass = Pass('FENGYUN 3D', tstart, tend, instrument='mersi-2', tle1=tle1, tle2=tle2) coords = (10.72, 59.942, 0.1) meos_format_str = mypass.print_meos(coords, line_no=1) self.assertEqual(meos_format_str, orig) def test_generate_metno_xml(self): import xml.etree.ElementTree as ET root = ET.Element("acquisition-schedule") orig = ('') tstart = datetime.strptime("2019-01-05T01:01:45", "%Y-%m-%dT%H:%M:%S") tend = tstart + timedelta(seconds=60 * 15.5) tle1 = '1 43010U 17072A 18363.54078832 -.00000045 00000-0 -79715-6 0 9999' tle2 = '2 43010 98.6971 300.6571 0001567 143.5989 216.5282 14.19710974 58158' mypass = Pass('FENGYUN 3D', tstart, tend, instrument='mersi2', tle1=tle1, tle2=tle2) coords = (10.72, 59.942, 0.1) mypass.generate_metno_xml(coords, root) # Dictionaries don't have guaranteed ordering in Python 3.7, so convert the strings to sets and compare them res = set(ET.tostring(root).decode("utf-8").split()) self.assertEqual(res, set(orig.split())) def tearDown(self): """Clean up""" pass def suite(): """The suite for test_satpass """ loader = unittest.TestLoader() mysuite = unittest.TestSuite() mysuite.addTest(loader.loadTestsFromTestCase(TestSwathBoundary)) mysuite.addTest(loader.loadTestsFromTestCase(TestPass)) mysuite.addTest(loader.loadTestsFromTestCase(TestPassList)) return mysuite pytroll-schedule-0.6.0/trollsched/tests/test_schedule.py000066400000000000000000000454721415440646100235470ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2014 - 2019 PyTroll # Author(s): # Martin Raspaud # Adam Dybbroe # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. 
# This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # You should have received a copy of the GNU General Public License # along with this program. If not, see . """Test the schedule module. """ import numpy as np from datetime import datetime, timedelta from trollsched.schedule import fermia, fermib, conflicting_passes from trollsched.schedule import parse_datetime, build_filename from pyresample.boundary import AreaBoundary from trollsched.satpass import get_next_passes from trollsched.satpass import get_aqua_terra_dumps from trollsched.satpass import get_metopa_passes import sys import unittest from unittest.mock import patch # class TestPass(unittest.TestCase): # def test_day(self): # satellite = "noaa 16" # tle1 = "1 26536U 00055A 13076.42963155 .00000201 00000-0 13237-3 0 1369" # tle2 = "2 26536 99.0540 128.2392 0010826 39.9070 85.2960 14.12848373643614" # orb = Orbital(satellite, line1=tle1, line2=tle2) # tstart = datetime(2013, 3, 18, 8, 15, 22, 352000) # tup = datetime(2013, 3, 18, 8, 22, 52, 352000) # tend = datetime(2013, 3, 18, 8, 30, 22, 352000) # overp = Pass(satellite, tstart, tend, orb, tup) # a little night # day = overp.day() # self.assertEquals(0.99735685408290298, day) # on the area of interest there is no night # area_of_interest = get_area_def("euron1") # day = overp.day(area_of_interest) # self.assertEquals(1.0, day) # tstart = datetime(2013, 3, 18, 8, 16, 22, 352000) # overp = Pass(satellite, tstart, tend, orb, tup) # an entire pass without night # day = overp.day() # self.assertEquals(1.0, day) class TestTools(unittest.TestCase): def test_conflicting_passes(self): class MyPass(object): def __init__(self, rise, fall): self.risetime = rise self.falltime = fall ref_time = datetime.utcnow() passes = [MyPass(ref_time, ref_time + timedelta(minutes=10)), 
MyPass(ref_time + timedelta(minutes=10.01), ref_time + timedelta(minutes=20))] self.assertEquals( len(conflicting_passes(passes, timedelta(seconds=0))), 2) self.assertEquals( len(conflicting_passes(passes, timedelta(seconds=60))), 1) class TestAreaBoundary(unittest.TestCase): def test_contour(self): side1_lons = np.arange(4) side1_lats = np.arange(4) + 20 side2_lons = np.arange(4) + 3 side2_lats = np.arange(4) + 20 + 3 side3_lons = np.arange(4) + 6 side3_lats = np.arange(4) + 20 + 6 side4_lons = np.arange(4) + 9 side4_lats = np.arange(4) + 20 + 9 bond = AreaBoundary((side1_lons, side1_lats), (side2_lons, side2_lats), (side3_lons, side3_lats), (side4_lons, side4_lats)) lons, lats = bond.contour() self.assertTrue(np.allclose(lons, np.arange(12))) self.assertTrue(np.allclose(lats, np.arange(12) + 20)) def test_decimate(self): side1_lons = np.arange(8) side1_lats = np.arange(8) + 30 side2_lons = np.arange(8) + 7 side2_lats = np.arange(8) + 30 + 7 side3_lons = np.arange(8) + 14 side3_lats = np.arange(8) + 30 + 14 side4_lons = np.arange(8) + 21 side4_lats = np.arange(8) + 30 + 21 bond = AreaBoundary((side1_lons, side1_lats), (side2_lons, side2_lats), (side3_lons, side3_lats), (side4_lons, side4_lats)) bond.decimate(5) lons, lats = bond.contour() self.assertTrue(np.allclose(lons, np.array([0, 1, 6, 7, 8, 13, 14, 15, 20, 21, 22, 27]))) self.assertTrue(np.allclose(lats, np.array([30, 31, 36, 37, 38, 43, 44, 45, 50, 51, 52, 57]))) class TestUtils(unittest.TestCase): def test_fermi(self): self.assertEquals(fermia(0.25), 0.875) self.assertEquals(fermib(0.25), 0.5) def test_parse_datetime(self): dtobj = parse_datetime('20190104110059') self.assertEqual(dtobj, datetime(2019, 1, 4, 11, 0, 59)) def test_build_filename(self): pattern_name = "dir_output" pattern_dict = {'file_xml': '{dir_output}/{date}-{time}-aquisition-schedule-{mode}-{station}.xml', 'file_sci': '{dir_output}/scisys-schedule-{station}.txt', 'dir_plots': '{dir_output}/plots.{station}', 'dir_output': '/tmp', 
'file_graph': '{dir_output}/graph.{station}'} kwargs = {'date': '20190104', 'output_dir': '.', 'dir_output': '/tmp', 'time': '122023'} res = build_filename(pattern_name, pattern_dict, kwargs) self.assertEqual(res, '/tmp') pattern_name = "file_xml" kwargs = {'station': 'nrk', 'mode': 'request', 'time': '125334', 'date': '20190104', 'dir_output': '/tmp', 'output_dir': '.'} res = build_filename(pattern_name, pattern_dict, kwargs) self.assertEqual(res, '/tmp/20190104-125334-aquisition-schedule-request-nrk.xml') class TestAll(unittest.TestCase): def setUp(self): """Set up""" from pyorbital import orbital from trollsched.schedule import Satellite self.utctime = datetime(2018, 11, 28, 10, 0) self.satellites = ["noaa-20", ] self.tles = {'noaa-20': {}} self.tles['noaa-20']['line1'] = "1 43013U 17073A 18331.00000000 .00000048 00000-0 22749-4 0 3056" self.tles['noaa-20']['line2'] = "2 43013 098.7413 267.0121 0001419 108.5818 058.1314 14.19552981053016" self.aquas = ["aqua", ] self.terras = ["terra", ] self.terra = Satellite('terra', 0, 0) self.metopa = Satellite('metop-a', 0, 0) self.tles['aqua'] = {} self.tles['aqua']['line1'] = "1 27424U 02022A 18332.21220389 .00000093 00000-0 30754-4 0 9994" self.tles['aqua']['line2'] = "2 27424 98.2121 270.9368 0001045 343.9225 155.8703 14.57111538881313" self.tles['terra'] = {} self.tles['terra']['line1'] = "1 25994U 99068A 18338.20920286 .00000076 00000-0 26867-4 0 9999" self.tles['terra']['line2'] = "2 25994 98.2142 50.5750 0000577 102.5211 257.6060 14.57132862 8586" self.tles['metop-a'] = {} self.tles['metop-a']['line1'] = "1 29499U 06044A 18338.30873671 .00000000 00000+0 31223-4 0 00013" self.tles['metop-a']['line2'] = "2 29499 98.6045 31.7725 0001942 91.8780 346.4884 14.21536046629175" self.orb = orbital.Orbital('NOAA 20', line1=self.tles['noaa-20']['line1'], line2=self.tles['noaa-20']['line2']) self.aqua_orb = orbital.Orbital('AQUA', line1=self.tles['aqua']['line1'], line2=self.tles['aqua']['line2']) self.terra_orb = 
orbital.Orbital('TERRA', line1=self.tles['terra']['line1'], line2=self.tles['terra']['line2']) self.metopa_orb = orbital.Orbital('Metop-A', line1=self.tles['metop-a']['line1'], line2=self.tles['metop-a']['line2']) # These values were used to generate the get_next_passes list mock: # utctime = datetime(2018, 12, 4, 9, 0) # forward = 6 # coords = (16, 58, 0) self.metopa_passlist = [(datetime(2018, 12, 4, 9, 10, 4, 574801), datetime(2018, 12, 4, 9, 25, 29, 157194), datetime(2018, 12, 4, 9, 17, 48, 530484)), (datetime(2018, 12, 4, 10, 50, 23, 899232), datetime(2018, 12, 4, 11, 4, 2, 335184), datetime(2018, 12, 4, 10, 57, 13, 691637)), (datetime(2018, 12, 4, 12, 30, 24, 97160), datetime(2018, 12, 4, 12, 40, 42, 403698), datetime(2018, 12, 4, 12, 35, 33, 317647)), (datetime(2018, 12, 4, 14, 9, 1, 937869), datetime(2018, 12, 4, 14, 17, 20, 556654), datetime(2018, 12, 4, 14, 13, 11, 247497))] self.dumpdata = [ {'los': datetime(2018, 11, 28, 10, 0, 30), 'station': 'USAK05', 'aos': datetime(2018, 11, 28, 9, 50, 24), 'elev': '11.188'}, {'los': datetime(2018, 11, 28, 11, 39, 47), 'station': 'AS2', 'aos': datetime(2018, 11, 28, 11, 28, 51), 'elev': '39.235'}, {'los': datetime(2018, 11, 28, 13, 19, 8), 'station': 'USAK05', 'aos': datetime(2018, 11, 28, 13, 6, 36), 'elev': '58.249'}, {'los': datetime(2018, 11, 28, 14, 54, 25), 'station': 'AS2', 'aos': datetime(2018, 11, 28, 14, 44, 37), 'elev': '22.403'}, {'los': datetime(2018, 11, 28, 16, 27, 22), 'station': 'SG1', 'aos': datetime(2018, 11, 28, 16, 16, 58), 'elev': '9.521'} ] self.dumpdata_terra = [{'los': datetime(2018, 11, 20, 23, 24, 41), 'station': 'SG2', 'aos': datetime(2018, 11, 20, 23, 12, 32), 'elev': '17.4526'}, {'los': datetime(2018, 11, 22, 23, 19, 21), 'station': 'AS3', 'aos': datetime(2018, 11, 22, 23, 8, 55), 'elev': '28.9558'}, {'los': datetime(2018, 11, 22, 23, 19, 21), 'station': 'AS3', 'aos': datetime(2018, 11, 22, 23, 8, 55), 'elev': '28.9558'}, {'los': datetime(2018, 11, 26, 22, 47, 34), 'station': 'SG1', 
'aos': datetime(2018, 11, 26, 22, 34, 58), 'elev': '21.5694'}, {'los': datetime(2018, 11, 26, 22, 47, 34), 'station': 'SG1', 'aos': datetime(2018, 11, 26, 22, 34, 58), 'elev': '21.5694'}, {'los': datetime(2018, 11, 26, 22, 47, 34), 'station': 'SG1', 'aos': datetime(2018, 11, 26, 22, 34, 58), 'elev': '21.5694'}, {'los': datetime(2018, 11, 27, 23, 30, 44), 'station': 'SG2', 'aos': datetime(2018, 11, 27, 23, 18, 39), 'elev': '16.8795'}, {'los': datetime(2018, 11, 27, 23, 30, 44), 'station': 'SG2', 'aos': datetime(2018, 11, 27, 23, 18, 39), 'elev': '16.8795'}, {'los': datetime(2018, 11, 28, 22, 43, 53), 'station': 'USAK05', 'aos': datetime(2018, 11, 28, 22, 31, 57), 'elev': '40.9264'}, {'los': datetime(2018, 11, 28, 22, 43, 53), 'station': 'USAK05', 'aos': datetime(2018, 11, 28, 22, 31, 57), 'elev': '40.9264'}, {'los': datetime(2018, 11, 29, 23, 25, 11), 'station': 'USAK05', 'aos': datetime(2018, 11, 29, 23, 14, 47), 'elev': '26.9937'}, {'los': datetime(2018, 11, 29, 23, 25, 11), 'station': 'USAK05', 'aos': datetime(2018, 11, 29, 23, 14, 47), 'elev': '26.9937'}, {'los': datetime(2018, 11, 30, 22, 31, 3), 'station': 'AS2', 'aos': datetime(2018, 11, 30, 22, 19, 48), 'elev': '47.8599'}, {'los': datetime(2018, 12, 1, 1, 29, 2), 'station': 'WG1', 'aos': datetime(2018, 12, 1, 1, 21, 11), 'elev': '8.0543'}, {'los': datetime(2018, 11, 30, 22, 31, 3), 'station': 'AS2', 'aos': datetime(2018, 11, 30, 22, 19, 48), 'elev': '47.8599'}, {'los': datetime(2018, 12, 1, 1, 29, 2), 'station': 'WG1', 'aos': datetime(2018, 12, 1, 1, 21, 11), 'elev': '8.0543'}, {'los': datetime(2018, 12, 3, 1, 28, 14), 'station': 'SG2', 'aos': datetime(2018, 12, 3, 1, 17, 53), 'elev': '9.2428'}, {'los': datetime(2018, 12, 3, 22, 53, 35), 'station': 'SG1', 'aos': datetime(2018, 12, 3, 22, 41, 5), 'elev': '20.8371'}, {'los': datetime(2018, 12, 3, 22, 53, 35), 'station': 'SG1', 'aos': datetime(2018, 12, 3, 22, 41, 5), 'elev': '20.8371'}, {'los': datetime(2018, 12, 4, 23, 43, 5), 'station': 'AS2', 'aos': 
datetime(2018, 12, 4, 23, 33, 8), 'elev': '23.546'}] @patch('os.path.exists') def test_get_next_passes_viirs(self, exists): exists.return_code = True # mymock: with patch('pyorbital.orbital.Orbital') as mymock: instance = mymock.return_value instance.get_next_passes = self.orb.get_next_passes allpasses = get_next_passes(self.satellites, self.utctime, 4, (16, 58, 0), tle_file='nonexisting') self.assertEqual(len(allpasses), 2) rt1 = datetime(2018, 11, 28, 10, 53, 42, 79483) ft1 = datetime(2018, 11, 28, 11, 9, 6, 916787) rt2 = datetime(2018, 11, 28, 12, 34, 44, 667963) ft2 = datetime(2018, 11, 28, 12, 49, 25, 134067) rise_times = [p.risetime for p in allpasses] fall_times = [p.falltime for p in allpasses] assert rt1 in rise_times assert rt2 in rise_times assert ft1 in fall_times assert ft2 in fall_times assert all([p.instrument == 'viirs' for p in allpasses]) @patch('os.path.exists') @patch('trollsched.satpass.get_aqua_terra_dumpdata_from_ftp') def test_get_next_passes_with_aquadumps(self, dumps_from_ftp, exists): dumps_from_ftp.return_value = self.dumpdata exists.return_code = True # mymock: with patch('pyorbital.orbital.Orbital') as mymock: instance = mymock.return_value instance.get_next_passes = self.aqua_orb.get_next_passes allpasses = get_next_passes(self.aquas, self.utctime, 6, (16, 58, 0), tle_file='nonexisting', aqua_terra_dumps=True) self.assertEqual(len(allpasses), 3) rt1 = datetime(2018, 11, 28, 11, 12, 8, 728455) ft1 = datetime(2018, 11, 28, 11, 26, 8, 250021) rt2 = datetime(2018, 11, 28, 12, 50, 46, 574975) ft2 = datetime(2018, 11, 28, 13, 3, 53, 262440) rt3 = datetime(2018, 11, 28, 14, 33, 33, 973194) ft3 = datetime(2018, 11, 28, 14, 40, 10, 761405) for mypass in allpasses: dtmin = timedelta(seconds=10000000) for risetime in [rt1, rt2, rt3]: dt_ = abs(mypass.risetime - risetime) if dt_ < dtmin: dtmin = dt_ self.assertAlmostEqual(dtmin.seconds, 0) dtmin = timedelta(seconds=10000000) for falltime in [ft1, ft2, ft3]: dt_ = abs(mypass.falltime - falltime) 
if dt_ < dtmin: dtmin = dt_ self.assertAlmostEqual(dtmin.seconds, 0) self.assertEqual(mypass.instrument, 'modis') @patch('trollsched.satpass.get_aqua_terra_dumpdata_from_ftp') def test_get_aqua_terra_dumps(self, dumps_from_ftp): dumps_from_ftp.return_value = self.dumpdata_terra # mymock: with patch('pyorbital.orbital.Orbital') as mymock: instance = mymock.return_value instance.get_next_passes = self.terra_orb.get_next_passes dumps = get_aqua_terra_dumps(datetime(2018, 12, 3, 0, 0), datetime(2018, 12, 10, 0, 0), self.terra_orb, self.terra) self.assertEqual(len(dumps), 4) self.assertEqual(dumps[0].station, 'SG2') self.assertEqual(dumps[0].max_elev, '9.2428') self.assertEqual(dumps[0].pass_direction(), 'ascending') self.assertEqual((dumps[0].risetime - datetime(2018, 12, 3, 1, 17, 53)).seconds, 0) self.assertEqual((dumps[0].falltime - datetime(2018, 12, 3, 1, 28, 14)).seconds, 0) self.assertEqual(dumps[3].station, 'AS2') self.assertEqual(dumps[3].max_elev, '23.546') self.assertEqual(dumps[3].pass_direction(), 'descending') self.assertEqual((dumps[3].risetime - datetime(2018, 12, 4, 23, 33, 8)).seconds, 0) self.assertEqual((dumps[3].falltime - datetime(2018, 12, 4, 23, 43, 5)).seconds, 0) @patch('os.path.exists') def test_get_metopa_passes(self, exists): exists.return_code = True # mymock: with patch('pyorbital.orbital.Orbital') as mymock: instance = mymock.return_value instance.get_next_passes = self.metopa_orb.get_next_passes metopa_passes = get_metopa_passes(self.metopa, self.metopa_passlist, self.metopa_orb) self.assertEqual(len(metopa_passes), 2) self.assertEqual(metopa_passes[0].pass_direction(), 'descending') self.assertAlmostEqual(metopa_passes[0].seconds(), 487.512589, 5) self.assertEqual((metopa_passes[0].uptime - datetime(2018, 12, 4, 9, 17, 48, 530484)).seconds, 0) self.assertEqual((metopa_passes[0].risetime - datetime(2018, 12, 4, 9, 17, 21, 644605)).seconds, 0) def tearDown(self): """Clean up""" pass def suite(): """The suite for test_schedule """ loader 
= unittest.TestLoader() mysuite = unittest.TestSuite() mysuite.addTest(loader.loadTestsFromTestCase(TestUtils)) mysuite.addTest(loader.loadTestsFromTestCase(TestAreaBoundary)) mysuite.addTest(loader.loadTestsFromTestCase(TestTools)) return mysuite pytroll-schedule-0.6.0/trollsched/tests/test_spherical.py000066400000000000000000000717011415440646100237170ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2013, 2014, 2015, 2018 Martin Raspaud # Author(s): # Martin Raspaud # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # You should have received a copy of the GNU General Public License # along with this program. If not, see . """Test cases for spherical geometry. """ from trollsched.spherical import SphPolygon, Arc, SCoordinate, CCoordinate import unittest import numpy as np class TestSCoordinate(unittest.TestCase): """Test SCoordinates. """ def test_distance(self): """Test Vincenty formula """ d = SCoordinate(0, 0).distance(SCoordinate(1, 1)) self.assertEquals(d, 1.2745557823062943) def test_hdistance(self): """Test Haversine formula """ d = SCoordinate(0, 0).hdistance(SCoordinate(1, 1)) self.assertTrue(np.allclose(d, 1.2745557823062943)) def test_str(self): """Check the string representation """ d = SCoordinate(0, 0) self.assertEqual(str(d), "(0.0, 0.0)") def test_repr(self): """Check the representation """ d = SCoordinate(0, 0) self.assertEqual(repr(d), "(0.0, 0.0)") class TestCCoordinate(unittest.TestCase): """Test SCoordinates. 
""" def test_str(self): """Check the string representation """ d = CCoordinate((0, 0, 0)) self.assertEqual(str(d), "[0 0 0]") def test_repr(self): """Check the representation """ d = CCoordinate((0, 0, 0)) self.assertEqual(repr(d), "[0 0 0]") def test_norm(self): """Euclidean norm of a cartesian vector """ d = CCoordinate((1, 0, 0)) self.assertEqual(d.norm(), 1.0) def test_normalize(self): """Normalize a cartesian vector """ d = CCoordinate((2., 0., 0.)) self.assertTrue(np.allclose(d.normalize().cart, [1, 0, 0])) def test_cross(self): """Test cross product in cartesian coordinates """ d = CCoordinate((1., 0., 0.)) c = CCoordinate((0., 1., 0.)) self.assertTrue(np.allclose(d.cross(c).cart, [0., 0., 1.])) def test_dot(self): """Test the dot product of two cartesian vectors. """ d = CCoordinate((1., 0., 0.)) c = CCoordinate((0., 1., 0.)) self.assertEqual(d.dot(c), 0) def test_ne(self): """Test inequality of two cartesian vectors. """ d = CCoordinate((1., 0., 0.)) c = CCoordinate((0., 1., 0.)) self.assertTrue(c != d) def test_eq(self): """Test equality of two cartesian vectors. """ d = CCoordinate((1., 0., 0.)) c = CCoordinate((0., 1., 0.)) self.assertFalse(c == d) def test_add(self): """Test adding cartesian vectors. """ d = CCoordinate((1., 0., 0.)) c = CCoordinate((0., 1., 0.)) b = CCoordinate((1., 1., 0.)) self.assertTrue(np.allclose((d + c).cart, b.cart)) self.assertTrue(np.allclose((d + (0, 1, 0)).cart, b.cart)) self.assertTrue(np.allclose(((0, 1, 0) + d).cart, b.cart)) def test_mul(self): """Test multiplying (element-wise) cartesian vectors. """ d = CCoordinate((1., 0., 0.)) c = CCoordinate((0., 1., 0.)) b = CCoordinate((0., 0., 0.)) self.assertTrue(np.allclose((d * c).cart, b.cart)) self.assertTrue(np.allclose((d * (0, 1, 0)).cart, b.cart)) self.assertTrue(np.allclose(((0, 1, 0) * d).cart, b.cart)) def test_to_spherical(self): """Test converting to spherical coordinates. 
""" d = CCoordinate((1., 0., 0.)) c = SCoordinate(0, 0) self.assertEqual(d.to_spherical(), c) class TestArc(unittest.TestCase): """Test arcs """ def test_eq(self): arc1 = Arc(SCoordinate(0, 0), SCoordinate(np.deg2rad(10), np.deg2rad(10))) arc2 = Arc(SCoordinate(0, np.deg2rad(10)), SCoordinate(np.deg2rad(10), 0)) self.assertFalse(arc1 == arc2) self.assertTrue(arc1 == arc1) def test_ne(self): arc1 = Arc(SCoordinate(0, 0), SCoordinate(np.deg2rad(10), np.deg2rad(10))) arc2 = Arc(SCoordinate(0, np.deg2rad(10)), SCoordinate(np.deg2rad(10), 0)) self.assertTrue(arc1 != arc2) self.assertFalse(arc1 != arc1) def test_str(self): arc1 = Arc(SCoordinate(0, 0), SCoordinate(np.deg2rad(10), np.deg2rad(10))) self.assertEqual(str(arc1), str(arc1.start) + " -> " + str(arc1.end)) self.assertEqual(repr(arc1), str(arc1.start) + " -> " + str(arc1.end)) def test_intersection(self): arc1 = Arc(SCoordinate(0, 0), SCoordinate(np.deg2rad(10), np.deg2rad(10))) arc2 = Arc(SCoordinate(0, np.deg2rad(10)), SCoordinate(np.deg2rad(10), 0)) lon, lat = arc1.intersection(arc2) np.testing.assert_allclose(np.rad2deg(lon), 5) np.testing.assert_allclose(np.rad2deg(lat), 5.0575148968282093) arc1 = Arc(SCoordinate(0, 0), SCoordinate(np.deg2rad(10), np.deg2rad(10))) self.assertTrue(arc1.intersection(arc1) is None) arc1 = Arc(SCoordinate(np.deg2rad(24.341215776575297), np.deg2rad(44.987819588259327)), SCoordinate(np.deg2rad(18.842727517611817), np.deg2rad(46.512483610284178))) arc2 = Arc(SCoordinate(np.deg2rad(20.165961750361905), np.deg2rad(46.177305385810541)), SCoordinate(np.deg2rad(20.253297585831707), np.deg2rad(50.935830837274324))) inter = SCoordinate(np.deg2rad(20.165957021925202), np.deg2rad(46.177022633103398)) self.assertEquals(arc1.intersection(arc2), inter) arc1 = Arc(SCoordinate(np.deg2rad(-2.4982818108326734), np.deg2rad(48.596644847869655)), SCoordinate(np.deg2rad(-2.9571441235622835), np.deg2rad(49.165688435261394))) arc2 = Arc(SCoordinate(np.deg2rad(-3.4976667413531688), 
np.deg2rad(48.562704872921373)), SCoordinate(np.deg2rad(-5.893976312685715), np.deg2rad(48.445795283217116))) self.assertTrue(arc1.intersection(arc2) is None) def test_angle(self): arc1 = Arc(SCoordinate(np.deg2rad(157.5), np.deg2rad(89.234600944314138)), SCoordinate(np.deg2rad(90), np.deg2rad(89))) arc2 = Arc(SCoordinate(np.deg2rad(157.5), np.deg2rad(89.234600944314138)), SCoordinate(np.deg2rad(135), np.deg2rad(89))) self.assertAlmostEqual(np.rad2deg(arc1.angle(arc2)), -44.996385007218926, 13) arc1 = Arc(SCoordinate(np.deg2rad(112.5), np.deg2rad(89.234600944314138)), SCoordinate(np.deg2rad(90), np.deg2rad(89))) arc2 = Arc(SCoordinate(np.deg2rad(112.5), np.deg2rad(89.234600944314138)), SCoordinate(np.deg2rad(45), np.deg2rad(89))) self.assertAlmostEqual(np.rad2deg(arc1.angle(arc2)), 44.996385007218883, 13) arc1 = Arc(SCoordinate(0, 0), SCoordinate(1, 0)) self.assertEqual(arc1.angle(arc1), 0) arc2 = Arc(SCoordinate(1, 0), SCoordinate(0, 0)) self.assertEqual(arc1.angle(arc2), 0) arc2 = Arc(SCoordinate(0, 0), SCoordinate(-1, 0)) self.assertEqual(arc1.angle(arc2), np.pi) arc2 = Arc(SCoordinate(2, 0), SCoordinate(1, 0)) self.assertEqual(arc1.angle(arc2), np.pi) arc2 = Arc(SCoordinate(2, 0), SCoordinate(3, 0)) self.assertRaises(ValueError, arc1.angle, arc2) class TestSphericalPolygon(unittest.TestCase): """Test the spherical polygon. 
""" def test_area(self): """Test the area function """ vertices = np.array([[1, 2, 3, 4, 3, 2], [3, 4, 3, 2, 1, 2]]).T polygon = SphPolygon(np.deg2rad(vertices)) self.assertAlmostEqual(0.00121732523118, polygon.area()) vertices = np.array([[1, 2, 3, 2], [3, 4, 3, 2]]).T polygon = SphPolygon(np.deg2rad(vertices)) self.assertAlmostEqual(0.000608430665842, polygon.area()) vertices = np.array([[0, 0, 1, 1], [0, 1, 1, 0]]).T polygon = SphPolygon(np.deg2rad(vertices)) self.assertAlmostEqual(0.000304609684862, polygon.area()) # Across the dateline vertices = np.array([[179.5, -179.5, -179.5, 179.5], [1, 1, 0, 0]]).T polygon = SphPolygon(np.deg2rad(vertices)) self.assertAlmostEqual(0.000304609684862, polygon.area()) vertices = np.array([[0, 90, 90, 0], [1, 1, 0, 0]]).T polygon = SphPolygon(np.deg2rad(vertices)) self.assertAlmostEqual(0.0349012696772, polygon.area()) vertices = np.array([[90, 0, 0], [0, 0, 90]]).T polygon = SphPolygon(np.deg2rad(vertices)) self.assertAlmostEqual(np.pi / 2, polygon.area()) # Around the north pole vertices = np.array([[0, -90, 180, 90], [89, 89, 89, 89]]).T polygon = SphPolygon(np.deg2rad(vertices)) self.assertAlmostEqual(0.000609265770322, polygon.area()) # Around the south pole vertices = np.array([[0, 90, 180, -90], [-89, -89, -89, -89]]).T polygon = SphPolygon(np.deg2rad(vertices)) self.assertAlmostEqual(0.000609265770322, polygon.area()) def test_is_inside(self): """Test checking if a polygon is inside of another. 
""" vertices = np.array([[1, 1, 20, 20], [1, 20, 20, 1]]).T polygon1 = SphPolygon(np.deg2rad(vertices)) vertices = np.array([[0, 0, 30, 30], [0, 30, 30, 0]]).T polygon2 = SphPolygon(np.deg2rad(vertices)) self.assertTrue(polygon1._is_inside(polygon2)) self.assertFalse(polygon2._is_inside(polygon1)) self.assertTrue(polygon2.area() > polygon1.area()) polygon2.invert() self.assertFalse(polygon1._is_inside(polygon2)) self.assertFalse(polygon2._is_inside(polygon1)) vertices = np.array([[0, 0, 30, 30], [21, 30, 30, 21]]).T polygon2 = SphPolygon(np.deg2rad(vertices)) self.assertFalse(polygon1._is_inside(polygon2)) self.assertFalse(polygon2._is_inside(polygon1)) polygon2.invert() self.assertTrue(polygon1._is_inside(polygon2)) self.assertFalse(polygon2._is_inside(polygon1)) vertices = np.array([[100, 100, 130, 130], [41, 50, 50, 41]]).T polygon2 = SphPolygon(np.deg2rad(vertices)) self.assertFalse(polygon1._is_inside(polygon2)) self.assertFalse(polygon2._is_inside(polygon1)) polygon2.invert() self.assertTrue(polygon1._is_inside(polygon2)) self.assertFalse(polygon2._is_inside(polygon1)) vertices = np.array([[-1.54009253, 82.62402855], [3.4804808, 82.8105746], [20.7214892, 83.00875812], [32.8857629, 82.7607758], [41.53844302, 82.36024339], [47.92062759, 81.91317164], [52.82785062, 81.45769791], [56.75107895, 81.00613046], [59.99843787, 80.56042986], [62.76998034, 80.11814453], [65.20076209, 79.67471372], [67.38577498, 79.22428], [69.39480149, 78.75981318], [71.28163984, 78.27283234], [73.09016378, 77.75277976], [74.85864685, 77.18594725], [76.62327682, 76.55367303], [78.42162204, 75.82918893], [80.29698409, 74.97171721], [82.30538638, 73.9143231], [84.52973107, 72.53535661], [87.11696138, 70.57600156], [87.79163209, 69.98712409], [72.98142447, 67.1760143], [61.79517279, 63.2846272], [53.50600609, 58.7098766], [47.26725347, 53.70533139], [42.44083259, 48.42199571], [38.59682041, 42.95008531], [35.45189206, 37.3452509], [32.43435578, 30.72373327], [31.73750748, 30.89485287], 
[29.37284023, 31.44344415], [27.66001308, 31.81016309], [26.31358296, 32.08057499], [25.1963477, 32.29313986], [24.23118049, 32.46821821], [23.36993508, 32.61780082], [22.57998837, 32.74952569], [21.8375532, 32.86857867], [21.12396693, 32.97868717], [20.42339605, 33.08268331], [19.72121983, 33.18284728], [19.00268283, 33.28113306], [18.2515215, 33.3793305], [17.4482606, 33.47919405], [16.56773514, 33.58255576], [15.57501961, 33.6914282], [14.4180087, 33.8080799], [13.01234319, 33.93498577], [11.20625437, 34.0742239], [8.67990371, 34.22415978], [7.89344478, 34.26018768], [8.69446485, 41.19823568], [9.25707165, 47.17351118], [9.66283477, 53.14128114], [9.84134875, 59.09937166], [9.65054241, 65.04458004], [8.7667375, 70.97023122], [6.28280904, 76.85731403]]) polygon1 = SphPolygon(np.deg2rad(vertices)) vertices = np.array([[49.94506701, 46.52610743], [51.04293649, 46.52610743], [62.02163129, 46.52610743], [73.0003261, 46.52610743], [83.9790209, 46.52610743], [85.05493299, 46.52610743], [85.05493299, 45.76549301], [85.05493299, 37.58315571], [85.05493299, 28.39260587], [85.05493299, 18.33178739], [85.05493299, 17.30750918], [83.95706351, 17.30750918], [72.97836871, 17.30750918], [61.9996739, 17.30750918], [51.0209791, 17.30750918], [49.94506701, 17.30750918], [49.94506701, 18.35262921], [49.94506701, 28.41192025], [49.94506701, 37.60055422], [49.94506701, 45.78080831]]) polygon2 = SphPolygon(np.deg2rad(vertices)) self.assertFalse(polygon2._is_inside(polygon1)) self.assertFalse(polygon1._is_inside(polygon2)) def test_bool(self): """Test the intersection and union functions. 
""" vertices = np.array([[180, 90, 0, -90], [89, 89, 89, 89]]).T poly1 = SphPolygon(np.deg2rad(vertices)) vertices = np.array([[-45, -135, 135, 45], [89, 89, 89, 89]]).T poly2 = SphPolygon(np.deg2rad(vertices)) uni = np.array([[157.5, 89.23460094], [-225., 89.], [112.5, 89.23460094], [90., 89.], [67.5, 89.23460094], [45., 89.], [22.5, 89.23460094], [0., 89.], [-22.5, 89.23460094], [-45., 89.], [-67.5, 89.23460094], [-90., 89.], [-112.5, 89.23460094], [-135., 89.], [-157.5, 89.23460094], [-180., 89.]]) inter = np.array([[157.5, 89.23460094], [112.5, 89.23460094], [67.5, 89.23460094], [22.5, 89.23460094], [-22.5, 89.23460094], [-67.5, 89.23460094], [-112.5, 89.23460094], [-157.5, 89.23460094]]) poly_inter = poly1.intersection(poly2) poly_union = poly1.union(poly2) self.assertTrue(poly_inter.area() <= poly_union.area()) self.assertTrue(np.allclose(poly_inter.vertices, np.deg2rad(inter))) self.assertTrue(np.allclose(poly_union.vertices, np.deg2rad(uni))) # Test 2 polygons sharing 2 contiguous edges. vertices1 = np.array([[-10, 10], [-5, 10], [0, 10], [5, 10], [10, 10], [10, -10], [-10, -10]]) vertices2 = np.array([[-5, 10], [0, 10], [5, 10], [5, -5], [-5, -5]]) vertices3 = np.array([[5, 10], [5, -5], [-5, -5], [-5, 10], [0, 10]]) poly1 = SphPolygon(np.deg2rad(vertices1)) poly2 = SphPolygon(np.deg2rad(vertices2)) poly_inter = poly1.intersection(poly2) self.assertTrue(np.allclose(poly_inter.vertices, np.deg2rad(vertices3))) # Test when last node of the intersection is the last vertice of the # second polygon. 
swath_vertices = np.array([[-115.32268301, 66.32946139], [-61.48397172, 58.56799254], [-60.25004314, 58.00754686], [-71.35057076, 49.60229517], [-113.746486, 56.03008985]]) area_vertices = np.array([[-68.32812107, 52.3480829], [-67.84993896, 53.07015692], [-55.54651296, 64.9254637], [-24.63341856, 74.24628796], [-31.8996363, 27.99907764], [-39.581043, 37.0639821], [-50.90185988, 45.56296169], [-67.43022017, 52.12399581]]) res = np.array([[-62.77837918, 59.12607053], [-61.48397172, 58.56799254], [-60.25004314, 58.00754686], [-71.35057076, 49.60229517], [-113.746486, 56.03008985], [-115.32268301, 66.32946139]]) poly1 = SphPolygon(np.deg2rad(swath_vertices)) poly2 = SphPolygon(np.deg2rad(area_vertices)) poly_inter = poly1.intersection(poly2) self.assertTrue(np.allclose(poly_inter.vertices, np.deg2rad(res))) poly_inter = poly2.intersection(poly1) self.assertTrue(np.allclose(poly_inter.vertices, np.deg2rad(res))) # vertices = np.array([[ -84.54058691, 71.80094043], # [ -74.68557932, 72.16812631], # [ -68.06987203, 72.1333064 ], # [ -63.17961469, 71.96265 ], # [ -59.33392061, 71.73824792], # [ -56.16798418, 71.49047832], # [ -53.46489053, 71.231076 ], # [ -51.08551155, 70.96395329], # [ -48.93484325, 70.68929276], # [ -46.94415494, 70.40519826], # [ -45.06071892, 70.10832093], # [ -43.24140861, 69.7939738 ], # [ -41.44830671, 69.45591086], # [ -39.64527217, 69.08578252], # [ -37.79474271, 68.6721527 ], # [ -35.85408829, 68.1987858 ], # [ -33.7705704 , 67.64156121], # [ -31.47314483, 66.9625364 ], # [ -28.85703847, 66.09736791], # [ -25.74961912, 64.92465312], # [ -21.81516555, 63.17261421], # [ -18.62398733, 62.28633798], # [ -16.93359509, 62.89011263], # [ -15.17161807, 63.47161418], # [ -13.33621801, 64.02936211], # [ -11.42593772, 64.56180886], # [ -9.43979715, 65.0673476 ], # [ -7.37739816, 65.54432277], # [ -5.23903263, 65.99104411], # [ -3.02579085, 66.40580433], # [ -0.73966571, 66.78690012], # [ 1.61635637, 67.13265703], # [ 4.03822468, 67.44145758], # [ 
6.52078043, 67.71177166], # [ 9.05775043, 67.94218891], # [ 11.64178394, 68.13145134], # [ 14.26453542, 68.27848476], # [ 16.9167971 , 68.38242749], # [ 19.58867724, 68.44265471], # [ 22.26981526, 68.45879658], # [ 24.94962586, 68.43074943], # [ 27.61755654, 68.35867876], # [ 30.26334172, 68.24301426], # [ 32.87724117, 68.08443684], # [ 35.45024798, 67.88385879], # [ 37.97425437, 67.64239838], # [ 40.44217258, 67.36135027], # [ 42.84800609, 67.04215364], # [ 45.18687531, 66.68635947], # [ 47.45500013, 66.2955988 ], # [ 49.64965026, 65.87155246], # [ 52.34514841, 66.28428851], # [ 56.04377347, 68.57914951], # [ 59.05474396, 70.10401937], # [ 61.66799965, 71.23110288], # [ 64.02929638, 72.12002156], # [ 66.22835251, 72.85391032], # [ 68.32829893, 73.48143318], # [ 70.37866226, 74.03347161], # [ 72.42237212, 74.53085444], # [ 74.50035309, 74.98833047], # [ 76.65524775, 75.41675945], # [ 78.93517067, 75.824363 ], # [ 81.39826053, 76.21741056], # [ 84.11897279, 76.600482 ], # [ 87.19757467, 76.97627542], # [ 90.77537201, 77.3447072 ], # [ 95.06035831, 77.70058684], # [ 100.37229526, 78.02797258], # [ 107.22498444, 78.28582497], # [ 116.481466 , 78.36746171], # [ 129.66805239, 77.96163057], # [ 134.67038545, 78.4115401 ], # [ 136.40302873, 79.30544125], # [ 138.4763311 , 80.18558961], # [ 140.98282558, 81.04796485], # [ 144.04700981, 81.88693584], # [ 147.83664747, 82.6944745 ], # [ 152.57512293, 83.45896996], # [ 158.54810167, 84.16352558], # [ 166.0844409 , 84.78383882], # [ 175.46720475, 85.28657382], # [-173.27937931, 85.6309921 ], # [-160.67741256, 85.77820349], # [-147.84352095, 85.70789809], # [-136.01435526, 85.4301266 ], # [-125.94447471, 84.97922118], # [-117.77450148, 84.39683471], # [-111.28213275, 83.71944226], # [-106.1391311 , 82.97447237], # [-102.03983076, 82.18121521], # [ -98.73868716, 81.3529452 ], # [ -96.04944891, 80.49880811], # [ -93.83359781, 79.62518236], # [ -91.98834044, 78.73659234], # [ -90.43691725, 77.83630659], # [ -89.12142407, 
76.92672961], # [ -87.99766337, 76.0096614 ], # [ -87.03148527, 75.08647127], # [ -86.19618441, 74.15821627], # [ -85.47063566, 73.22572391], # [ -84.83794555, 72.28964996]]) # polygon = SphPolygon(np.deg2rad(vertices)) # polygon.invert() # from datetime import datetime # utctime = datetime(2013, 12, 12, 9, 31, 54, 485719) # utctime = datetime(2013, 11, 11, 11, 11) # twi = get_twilight_poly(utctime) # poly_inter_day = twi.intersection(polygon) # twi.invert() # poly_inter_night = twi.intersection(polygon) # import matplotlib.pyplot as plt # from mpl_toolkits.basemap import Basemap # map = Basemap(projection='nsper', lat_0 = 58, lon_0 = 16, # resolution = 'l', area_thresh = 1000.) # map = Basemap(resolution = "l") # map.drawcoastlines() # map.drawcountries() # map.drawmapboundary(fill_color='white') # map.drawmeridians(np.arange(0, 360, 30)) # map.drawparallels(np.arange(-90, 90, 30)) # poly_inter_day.draw(map, "-r") # poly_inter_night.draw(map, "-b") # plt.show() # def test_twilight(self): # """Test the twilight polygon. 
# """ # from datetime import datetime # utctime = datetime(2013, 3, 20, 12, 0) # print np.rad2deg(get_twilight_poly(utctime).vertices) # vertices = np.array([[0, -90, 180, 90], # [89, 89, 89, 89]]).T def suite(): """The suite for test_spherical """ loader = unittest.TestLoader() mysuite = unittest.TestSuite() mysuite.addTest(loader.loadTestsFromTestCase(TestSCoordinate)) mysuite.addTest(loader.loadTestsFromTestCase(TestCCoordinate)) mysuite.addTest(loader.loadTestsFromTestCase(TestArc)) mysuite.addTest(loader.loadTestsFromTestCase(TestSphericalPolygon)) return mysuite if __name__ == '__main__': unittest.main() pytroll-schedule-0.6.0/trollsched/utils.py000066400000000000000000000131351415440646100207010ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright (c) 2017, 2018 Alexander Maul # # Author(s): # # Alexander Maul # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see . 
"""Utility functions and config reading for the pytroll-scheduler """ import yaml import logging from collections.abc import Mapping from configparser import ConfigParser from trollsched import schedule logger = logging.getLogger("trollsched") def read_yaml_file(file_name): """Read one or more files in to a single dict object.""" if isinstance(file_name, str): file_name = [file_name] conf_dict = {} for file_obj in file_name: with open(file_obj) as fp: tmp_dict = yaml.safe_load(fp) conf_dict = recursive_dict_update(conf_dict, tmp_dict) return conf_dict def recursive_dict_update(d, u): """Recursive dictionary update. Copied from: http://stackoverflow.com/questions/3232943/update-value-of-a-nested-dictionary-of-varying-depth """ for k, v in u.items(): if isinstance(v, Mapping): r = recursive_dict_update(d.get(k, {}), v) d[k] = r else: d[k] = u[k] return d def read_config(filename): try: return read_config_yaml(filename) except yaml.parser.ParserError as e: return read_config_cfg(filename) def read_config_cfg(filename): """Read the config file *filename* and replace the values in global variables. """ cfg = ConfigParser() cfg.read(filename) def read_cfg_opts(section): """Read the option:value pairs in one section, converting value to int/float if applicable. 
""" kv_dict = {} for k, v in cfg.items(section): try: kv_dict[k] = int(v) except Exception: try: kv_dict[k] = float(v) except Exception: kv_dict[k] = v return kv_dict default_params = read_cfg_opts("default") pattern = {} for k, v in cfg.items("pattern"): pattern[k] = v station_list = [] for station_id in default_params["station"].split(","): station_params = read_cfg_opts(station_id) satellites = cfg.get(station_id, "satellites").split(",") sat_list = [] for sat_name in satellites: sat_list.append(schedule.Satellite(sat_name, **read_cfg_opts(sat_name) )) new_station = schedule.Station(station_id, **station_params) new_station.satellites = sat_list station_list.append(new_station) scheduler = schedule.Scheduler(stations=station_list, min_pass=default_params.get("min_pass", 4), forward=default_params.get("forward"), start=default_params.get("start"), dump_url=default_params.get("dump_url", None), patterns=pattern, center_id=default_params.get("center_id", "unknown")) return scheduler def read_config_yaml(filename): """Read the yaml file *filename* and create a scheduler.""" cfg = read_yaml_file(filename) satellites = {sat_name: schedule.Satellite(sat_name, **sat_params) for (sat_name, sat_params) in cfg["satellites"].items()} stations = {} for station_id, station in cfg["stations"].items(): if isinstance(station['satellites'], dict): sat_list = [] for (sat_name, sat_params) in station["satellites"].items(): if sat_params is None: sat_list.append(satellites[sat_name]) else: sat_list.append(schedule.Satellite(sat_name, **sat_params)) else: sat_list = [satellites[sat_name] for sat_name in station['satellites']] new_station = schedule.Station(station_id, **station) new_station.satellites = sat_list stations[station_id] = new_station pattern = {} for k, v in cfg["pattern"].items(): pattern[k] = v sched_params = cfg['default'] plot_parameters = sched_params.get('plot_parameters', {}) plot_title = sched_params.get('plot_title', None) scheduler = 
schedule.Scheduler(stations=[stations[st_id] for st_id in sched_params['station']], min_pass=sched_params.get('min_pass', 4), forward=sched_params['forward'], start=sched_params['start'], dump_url=sched_params.get('dump_url'), patterns=pattern, center_id=sched_params.get('center_id', 'unknown'), plot_parameters=plot_parameters, plot_title=plot_title) return scheduler pytroll-schedule-0.6.0/trollsched/version.py000066400000000000000000000441121415440646100212250ustar00rootroot00000000000000 # This file helps to compute a version number in source trees obtained from # git-archive tarball (such as those provided by githubs download-from-tag # feature). Distribution tarballs (built by setup.py sdist) and build # directories (produced by setup.py build) will contain a much shorter file # that just contains the computed version number. # This file is released into the public domain. Generated by # versioneer-0.18 (https://github.com/warner/python-versioneer) """Git implementation of _version.py.""" import errno import os import re import subprocess import sys def get_keywords(): """Get the keywords needed to look up the version information.""" # these strings will be replaced by git during git-archive. # setup.py/versioneer.py will grep for the variable names, so they must # each be defined on a line of their own. _version.py will just call # get_keywords(). 
git_refnames = " (HEAD -> main, tag: v0.6.0)" git_full = "5f2cb59a0c99ad27643e2615cbf6fd4977e6c3c0" git_date = "2021-12-09 16:05:37 +0200" keywords = {"refnames": git_refnames, "full": git_full, "date": git_date} return keywords class VersioneerConfig: """Container for Versioneer configuration parameters.""" def get_config(): """Create, populate and return the VersioneerConfig() object.""" # these strings are filled in when 'setup.py versioneer' creates # _version.py cfg = VersioneerConfig() cfg.VCS = "git" cfg.style = "pep440" cfg.tag_prefix = "v" cfg.parentdir_prefix = "" cfg.versionfile_source = "trollsched/version.py" cfg.verbose = False return cfg class NotThisMethod(Exception): """Exception raised if a method is not valid for the current scenario.""" LONG_VERSION_PY = {} HANDLERS = {} def register_vcs_handler(vcs, method): # decorator """Decorator to mark a method as the handler for a particular VCS.""" def decorate(f): """Store f in HANDLERS[vcs][method].""" if vcs not in HANDLERS: HANDLERS[vcs] = {} HANDLERS[vcs][method] = f return f return decorate def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, env=None): """Call the given command(s).""" assert isinstance(commands, list) p = None for c in commands: try: dispcmd = str([c] + args) # remember shell=False, so use git.cmd on windows, not just git p = subprocess.Popen([c] + args, cwd=cwd, env=env, stdout=subprocess.PIPE, stderr=(subprocess.PIPE if hide_stderr else None)) break except EnvironmentError: e = sys.exc_info()[1] if e.errno == errno.ENOENT: continue if verbose: print("unable to run %s" % dispcmd) print(e) return None, None else: if verbose: print("unable to find command, tried %s" % (commands,)) return None, None stdout = p.communicate()[0].strip() if sys.version_info[0] >= 3: stdout = stdout.decode() if p.returncode != 0: if verbose: print("unable to run %s (error)" % dispcmd) print("stdout was %s" % stdout) return None, p.returncode return stdout, p.returncode def 
versions_from_parentdir(parentdir_prefix, root, verbose): """Try to determine the version from the parent directory name. Source tarballs conventionally unpack into a directory that includes both the project name and a version string. We will also support searching up two directory levels for an appropriately named parent directory """ rootdirs = [] for i in range(3): dirname = os.path.basename(root) if dirname.startswith(parentdir_prefix): return {"version": dirname[len(parentdir_prefix):], "full-revisionid": None, "dirty": False, "error": None, "date": None} else: rootdirs.append(root) root = os.path.dirname(root) # up a level if verbose: print("Tried directories %s but none started with prefix %s" % (str(rootdirs), parentdir_prefix)) raise NotThisMethod("rootdir doesn't start with parentdir_prefix") @register_vcs_handler("git", "get_keywords") def git_get_keywords(versionfile_abs): """Extract version information from the given file.""" # the code embedded in _version.py can just fetch the value of these # keywords. When used from setup.py, we don't want to import _version.py, # so we do it with a regexp instead. This function is not used from # _version.py. 
keywords = {} try: f = open(versionfile_abs, "r") for line in f.readlines(): if line.strip().startswith("git_refnames ="): mo = re.search(r'=\s*"(.*)"', line) if mo: keywords["refnames"] = mo.group(1) if line.strip().startswith("git_full ="): mo = re.search(r'=\s*"(.*)"', line) if mo: keywords["full"] = mo.group(1) if line.strip().startswith("git_date ="): mo = re.search(r'=\s*"(.*)"', line) if mo: keywords["date"] = mo.group(1) f.close() except EnvironmentError: pass return keywords @register_vcs_handler("git", "keywords") def git_versions_from_keywords(keywords, tag_prefix, verbose): """Get version information from git keywords.""" if not keywords: raise NotThisMethod("no keywords at all, weird") date = keywords.get("date") if date is not None: # git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant # datestamp. However we prefer "%ci" (which expands to an "ISO-8601 # -like" string, which we must then edit to make compliant), because # it's been around since git-1.5.3, and it's too difficult to # discover which version we're using, or to work around using an # older one. date = date.strip().replace(" ", "T", 1).replace(" ", "", 1) refnames = keywords["refnames"].strip() if refnames.startswith("$Format"): if verbose: print("keywords are unexpanded, not using") raise NotThisMethod("unexpanded keywords, not a git-archive tarball") refs = set([r.strip() for r in refnames.strip("()").split(",")]) # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of # just "foo-1.0". If we see a "tag: " prefix, prefer those. TAG = "tag: " tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)]) if not tags: # Either we're using git < 1.8.3, or there really are no tags. We use # a heuristic: assume all version tags have a digit. The old git %d # expansion behaves like git log --decorate=short and strips out the # refs/heads/ and refs/tags/ prefixes that would let us distinguish # between branches and tags. 
By ignoring refnames without digits, we # filter out many common branch names like "release" and # "stabilization", as well as "HEAD" and "master". tags = set([r for r in refs if re.search(r'\d', r)]) if verbose: print("discarding '%s', no digits" % ",".join(refs - tags)) if verbose: print("likely tags: %s" % ",".join(sorted(tags))) for ref in sorted(tags): # sorting will prefer e.g. "2.0" over "2.0rc1" if ref.startswith(tag_prefix): r = ref[len(tag_prefix):] if verbose: print("picking %s" % r) return {"version": r, "full-revisionid": keywords["full"].strip(), "dirty": False, "error": None, "date": date} # no suitable tags, so version is "0+unknown", but full hex is still there if verbose: print("no suitable tags, using unknown + full revision id") return {"version": "0+unknown", "full-revisionid": keywords["full"].strip(), "dirty": False, "error": "no suitable tags", "date": None} @register_vcs_handler("git", "pieces_from_vcs") def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): """Get version from 'git describe' in the root of the source tree. This only gets called if the git-archive 'subst' keywords were *not* expanded, and _version.py hasn't already been rewritten with a short version string, meaning we're inside a checked out source tree. 
""" GITS = ["git"] if sys.platform == "win32": GITS = ["git.cmd", "git.exe"] out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root, hide_stderr=True) if rc != 0: if verbose: print("Directory %s not under git control" % root) raise NotThisMethod("'git rev-parse --git-dir' returned error") # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty] # if there isn't one, this yields HEX[-dirty] (no NUM) describe_out, rc = run_command(GITS, ["describe", "--tags", "--dirty", "--always", "--long", "--match", "%s*" % tag_prefix], cwd=root) # --long was added in git-1.5.5 if describe_out is None: raise NotThisMethod("'git describe' failed") describe_out = describe_out.strip() full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root) if full_out is None: raise NotThisMethod("'git rev-parse' failed") full_out = full_out.strip() pieces = {} pieces["long"] = full_out pieces["short"] = full_out[:7] # maybe improved later pieces["error"] = None # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] # TAG might have hyphens. git_describe = describe_out # look for -dirty suffix dirty = git_describe.endswith("-dirty") pieces["dirty"] = dirty if dirty: git_describe = git_describe[:git_describe.rindex("-dirty")] # now we have TAG-NUM-gHEX or HEX if "-" in git_describe: # TAG-NUM-gHEX mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe) if not mo: # unparseable. Maybe git-describe is misbehaving? 
pieces["error"] = ("unable to parse git-describe output: '%s'" % describe_out) return pieces # tag full_tag = mo.group(1) if not full_tag.startswith(tag_prefix): if verbose: fmt = "tag '%s' doesn't start with prefix '%s'" print(fmt % (full_tag, tag_prefix)) pieces["error"] = ("tag '%s' doesn't start with prefix '%s'" % (full_tag, tag_prefix)) return pieces pieces["closest-tag"] = full_tag[len(tag_prefix):] # distance: number of commits since tag pieces["distance"] = int(mo.group(2)) # commit: short hex revision ID pieces["short"] = mo.group(3) else: # HEX: no tags pieces["closest-tag"] = None count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"], cwd=root) pieces["distance"] = int(count_out) # total number of commits # commit date: see ISO-8601 comment in git_versions_from_keywords() date = run_command(GITS, ["show", "-s", "--format=%ci", "HEAD"], cwd=root)[0].strip() pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1) return pieces def plus_or_dot(pieces): """Return a + if we don't already have one, else return a .""" if "+" in pieces.get("closest-tag", ""): return "." return "+" def render_pep440(pieces): """Build up version string, with post-release "local version identifier". Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty Exceptions: 1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty] """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"] or pieces["dirty"]: rendered += plus_or_dot(pieces) rendered += "%d.g%s" % (pieces["distance"], pieces["short"]) if pieces["dirty"]: rendered += ".dirty" else: # exception #1 rendered = "0+untagged.%d.g%s" % (pieces["distance"], pieces["short"]) if pieces["dirty"]: rendered += ".dirty" return rendered def render_pep440_pre(pieces): """TAG[.post.devDISTANCE] -- No -dirty. Exceptions: 1: no tags. 
0.post.devDISTANCE """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"]: rendered += ".post.dev%d" % pieces["distance"] else: # exception #1 rendered = "0.post.dev%d" % pieces["distance"] return rendered def render_pep440_post(pieces): """TAG[.postDISTANCE[.dev0]+gHEX] . The ".dev0" means dirty. Note that .dev0 sorts backwards (a dirty tree will appear "older" than the corresponding clean one), but you shouldn't be releasing software with -dirty anyways. Exceptions: 1: no tags. 0.postDISTANCE[.dev0] """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"] or pieces["dirty"]: rendered += ".post%d" % pieces["distance"] if pieces["dirty"]: rendered += ".dev0" rendered += plus_or_dot(pieces) rendered += "g%s" % pieces["short"] else: # exception #1 rendered = "0.post%d" % pieces["distance"] if pieces["dirty"]: rendered += ".dev0" rendered += "+g%s" % pieces["short"] return rendered def render_pep440_old(pieces): """TAG[.postDISTANCE[.dev0]] . The ".dev0" means dirty. Eexceptions: 1: no tags. 0.postDISTANCE[.dev0] """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"] or pieces["dirty"]: rendered += ".post%d" % pieces["distance"] if pieces["dirty"]: rendered += ".dev0" else: # exception #1 rendered = "0.post%d" % pieces["distance"] if pieces["dirty"]: rendered += ".dev0" return rendered def render_git_describe(pieces): """TAG[-DISTANCE-gHEX][-dirty]. Like 'git describe --tags --dirty --always'. Exceptions: 1: no tags. HEX[-dirty] (note: no 'g' prefix) """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"]: rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) else: # exception #1 rendered = pieces["short"] if pieces["dirty"]: rendered += "-dirty" return rendered def render_git_describe_long(pieces): """TAG-DISTANCE-gHEX[-dirty]. Like 'git describe --tags --dirty --always -long'. The distance/hash is unconditional. Exceptions: 1: no tags. 
HEX[-dirty] (note: no 'g' prefix) """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) else: # exception #1 rendered = pieces["short"] if pieces["dirty"]: rendered += "-dirty" return rendered def render(pieces, style): """Render the given version pieces into the requested style.""" if pieces["error"]: return {"version": "unknown", "full-revisionid": pieces.get("long"), "dirty": None, "error": pieces["error"], "date": None} if not style or style == "default": style = "pep440" # the default if style == "pep440": rendered = render_pep440(pieces) elif style == "pep440-pre": rendered = render_pep440_pre(pieces) elif style == "pep440-post": rendered = render_pep440_post(pieces) elif style == "pep440-old": rendered = render_pep440_old(pieces) elif style == "git-describe": rendered = render_git_describe(pieces) elif style == "git-describe-long": rendered = render_git_describe_long(pieces) else: raise ValueError("unknown style '%s'" % style) return {"version": rendered, "full-revisionid": pieces["long"], "dirty": pieces["dirty"], "error": None, "date": pieces.get("date")} def get_versions(): """Get version information or return default if unable to do so.""" # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have # __file__, we can work backwards from there to the root. Some # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which # case we can only use expanded keywords. cfg = get_config() verbose = cfg.verbose try: return git_versions_from_keywords(get_keywords(), cfg.tag_prefix, verbose) except NotThisMethod: pass try: root = os.path.realpath(__file__) # versionfile_source is the relative path from the top of the source # tree (where the .git directory might live) to this file. Invert # this to find the root from __file__. 
for i in cfg.versionfile_source.split('/'): root = os.path.dirname(root) except NameError: return {"version": "0+unknown", "full-revisionid": None, "dirty": None, "error": "unable to find root of source tree", "date": None} try: pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose) return render(pieces, cfg.style) except NotThisMethod: pass try: if cfg.parentdir_prefix: return versions_from_parentdir(cfg.parentdir_prefix, root, verbose) except NotThisMethod: pass return {"version": "0+unknown", "full-revisionid": None, "dirty": None, "error": "unable to compute version", "date": None} pytroll-schedule-0.6.0/versioneer.py000066400000000000000000002060031415440646100175550ustar00rootroot00000000000000 # Version: 0.18 """The Versioneer - like a rocketeer, but for versions. The Versioneer ============== * like a rocketeer, but for versions! * https://github.com/warner/python-versioneer * Brian Warner * License: Public Domain * Compatible With: python2.6, 2.7, 3.2, 3.3, 3.4, 3.5, 3.6, and pypy * [![Latest Version] (https://pypip.in/version/versioneer/badge.svg?style=flat) ](https://pypi.python.org/pypi/versioneer/) * [![Build Status] (https://travis-ci.org/warner/python-versioneer.png?branch=master) ](https://travis-ci.org/warner/python-versioneer) This is a tool for managing a recorded version number in distutils-based python projects. The goal is to remove the tedious and error-prone "update the embedded version string" step from your release process. Making a new release should be as easy as recording a new tag in your version-control system, and maybe making new tarballs. 
## Quick Install * `pip install versioneer` to somewhere to your $PATH * add a `[versioneer]` section to your setup.cfg (see below) * run `versioneer install` in your source tree, commit the results ## Version Identifiers Source trees come from a variety of places: * a version-control system checkout (mostly used by developers) * a nightly tarball, produced by build automation * a snapshot tarball, produced by a web-based VCS browser, like github's "tarball from tag" feature * a release tarball, produced by "setup.py sdist", distributed through PyPI Within each source tree, the version identifier (either a string or a number, this tool is format-agnostic) can come from a variety of places: * ask the VCS tool itself, e.g. "git describe" (for checkouts), which knows about recent "tags" and an absolute revision-id * the name of the directory into which the tarball was unpacked * an expanded VCS keyword ($Id$, etc) * a `_version.py` created by some earlier build step For released software, the version identifier is closely related to a VCS tag. Some projects use tag names that include more than just the version string (e.g. "myproject-1.2" instead of just "1.2"), in which case the tool needs to strip the tag prefix to extract the version identifier. For unreleased software (between tags), the version identifier should provide enough information to help developers recreate the same tree, while also giving them an idea of roughly how old the tree is (after version 1.2, before version 1.3). Many VCS systems can report a description that captures this, for example `git describe --tags --dirty --always` reports things like "0.7-1-g574ab98-dirty" to indicate that the checkout is one revision past the 0.7 tag, has a unique revision id of "574ab98", and is "dirty" (it has uncommitted changes. 
The version identifier is used for multiple purposes: * to allow the module to self-identify its version: `myproject.__version__` * to choose a name and prefix for a 'setup.py sdist' tarball ## Theory of Operation Versioneer works by adding a special `_version.py` file into your source tree, where your `__init__.py` can import it. This `_version.py` knows how to dynamically ask the VCS tool for version information at import time. `_version.py` also contains `$Revision$` markers, and the installation process marks `_version.py` to have this marker rewritten with a tag name during the `git archive` command. As a result, generated tarballs will contain enough information to get the proper version. To allow `setup.py` to compute a version too, a `versioneer.py` is added to the top level of your source tree, next to `setup.py` and the `setup.cfg` that configures it. This overrides several distutils/setuptools commands to compute the version when invoked, and changes `setup.py build` and `setup.py sdist` to replace `_version.py` with a small static file that contains just the generated version data. ## Installation See [INSTALL.md](./INSTALL.md) for detailed installation instructions. ## Version-String Flavors Code which uses Versioneer can learn about its version string at runtime by importing `_version` from your main `__init__.py` file and running the `get_versions()` function. From the "outside" (e.g. in `setup.py`), you can import the top-level `versioneer.py` and run `get_versions()`. Both functions return a dictionary with different flavors of version information: * `['version']`: A condensed version string, rendered using the selected style. This is the most commonly used value for the project's version string. The default "pep440" style yields strings like `0.11`, `0.11+2.g1076c97`, or `0.11+2.g1076c97.dirty`. See the "Styles" section below for alternative styles. * `['full-revisionid']`: detailed revision identifier. 
For Git, this is the full SHA1 commit id, e.g. "1076c978a8d3cfc70f408fe5974aa6c092c949ac". * `['date']`: Date and time of the latest `HEAD` commit. For Git, it is the commit date in ISO 8601 format. This will be None if the date is not available. * `['dirty']`: a boolean, True if the tree has uncommitted changes. Note that this is only accurate if run in a VCS checkout, otherwise it is likely to be False or None * `['error']`: if the version string could not be computed, this will be set to a string describing the problem, otherwise it will be None. It may be useful to throw an exception in setup.py if this is set, to avoid e.g. creating tarballs with a version string of "unknown". Some variants are more useful than others. Including `full-revisionid` in a bug report should allow developers to reconstruct the exact code being tested (or indicate the presence of local changes that should be shared with the developers). `version` is suitable for display in an "about" box or a CLI `--version` output: it can be easily compared against release notes and lists of bugs fixed in various releases. The installer adds the following text to your `__init__.py` to place a basic version in `YOURPROJECT.__version__`: from ._version import get_versions __version__ = get_versions()['version'] del get_versions ## Styles The setup.cfg `style=` configuration controls how the VCS information is rendered into a version string. The default style, "pep440", produces a PEP440-compliant string, equal to the un-prefixed tag name for actual releases, and containing an additional "local version" section with more detail for in-between builds. For Git, this is TAG[+DISTANCE.gHEX[.dirty]] , using information from `git describe --tags --dirty --always`. For example "0.11+2.g1076c97.dirty" indicates that the tree is like the "1076c97" commit but has uncommitted changes (".dirty"), and that this commit is two revisions ("+2") beyond the "0.11" tag. 
For released software (exactly equal to a known tag), the identifier will only contain the stripped tag, e.g. "0.11". Other styles are available. See [details.md](details.md) in the Versioneer source tree for descriptions. ## Debugging Versioneer tries to avoid fatal errors: if something goes wrong, it will tend to return a version of "0+unknown". To investigate the problem, run `setup.py version`, which will run the version-lookup code in a verbose mode, and will display the full contents of `get_versions()` (including the `error` string, which may help identify what went wrong). ## Known Limitations Some situations are known to cause problems for Versioneer. This details the most significant ones. More can be found on Github [issues page](https://github.com/warner/python-versioneer/issues). ### Subprojects Versioneer has limited support for source trees in which `setup.py` is not in the root directory (e.g. `setup.py` and `.git/` are *not* siblings). The are two common reasons why `setup.py` might not be in the root: * Source trees which contain multiple subprojects, such as [Buildbot](https://github.com/buildbot/buildbot), which contains both "master" and "slave" subprojects, each with their own `setup.py`, `setup.cfg`, and `tox.ini`. Projects like these produce multiple PyPI distributions (and upload multiple independently-installable tarballs). * Source trees whose main purpose is to contain a C library, but which also provide bindings to Python (and perhaps other langauges) in subdirectories. Versioneer will look for `.git` in parent directories, and most operations should get the right version string. However `pip` and `setuptools` have bugs and implementation details which frequently cause `pip install .` from a subproject directory to fail to find a correct version string (so it usually defaults to `0+unknown`). `pip install --editable .` should work correctly. `setup.py install` might work too. 
Pip-8.1.1 is known to have this problem, but hopefully it will get fixed in some later version. [Bug #38](https://github.com/warner/python-versioneer/issues/38) is tracking this issue. The discussion in [PR #61](https://github.com/warner/python-versioneer/pull/61) describes the issue from the Versioneer side in more detail. [pip PR#3176](https://github.com/pypa/pip/pull/3176) and [pip PR#3615](https://github.com/pypa/pip/pull/3615) contain work to improve pip to let Versioneer work correctly. Versioneer-0.16 and earlier only looked for a `.git` directory next to the `setup.cfg`, so subprojects were completely unsupported with those releases. ### Editable installs with setuptools <= 18.5 `setup.py develop` and `pip install --editable .` allow you to install a project into a virtualenv once, then continue editing the source code (and test) without re-installing after every change. "Entry-point scripts" (`setup(entry_points={"console_scripts": ..})`) are a convenient way to specify executable scripts that should be installed along with the python package. These both work as expected when using modern setuptools. When using setuptools-18.5 or earlier, however, certain operations will cause `pkg_resources.DistributionNotFound` errors when running the entrypoint script, which must be resolved by re-installing the package. This happens when the install happens with one version, then the egg_info data is regenerated while a different version is checked out. Many setup.py commands cause egg_info to be rebuilt (including `sdist`, `wheel`, and installing into a different virtualenv), so this can be surprising. [Bug #83](https://github.com/warner/python-versioneer/issues/83) describes this one, but upgrading to a newer version of setuptools should probably resolve it. ### Unicode version strings While Versioneer works (and is continually tested) with both Python 2 and Python 3, it is not entirely consistent with bytes-vs-unicode distinctions. 
Newer releases probably generate unicode version strings on py2. It's not clear that this is wrong, but it may be surprising for applications when then write these strings to a network connection or include them in bytes-oriented APIs like cryptographic checksums. [Bug #71](https://github.com/warner/python-versioneer/issues/71) investigates this question. ## Updating Versioneer To upgrade your project to a new release of Versioneer, do the following: * install the new Versioneer (`pip install -U versioneer` or equivalent) * edit `setup.cfg`, if necessary, to include any new configuration settings indicated by the release notes. See [UPGRADING](./UPGRADING.md) for details. * re-run `versioneer install` in your source tree, to replace `SRC/_version.py` * commit any changed files ## Future Directions This tool is designed to make it easily extended to other version-control systems: all VCS-specific components are in separate directories like src/git/ . The top-level `versioneer.py` script is assembled from these components by running make-versioneer.py . In the future, make-versioneer.py will take a VCS name as an argument, and will construct a version of `versioneer.py` that is specific to the given VCS. It might also take the configuration arguments that are currently provided manually during installation by editing setup.py . Alternatively, it might go the other direction and include code from all supported VCS systems, reducing the number of intermediate scripts. ## License To make Versioneer easier to embed, all its code is dedicated to the public domain. The `_version.py` that it creates is also in the public domain. Specifically, both are released under the Creative Commons "Public Domain Dedication" license (CC0-1.0), as described in https://creativecommons.org/publicdomain/zero/1.0/ . 
""" from __future__ import print_function try: import configparser except ImportError: import ConfigParser as configparser import errno import json import os import re import subprocess import sys class VersioneerConfig: """Container for Versioneer configuration parameters.""" def get_root(): """Get the project root directory. We require that all commands are run from the project root, i.e. the directory that contains setup.py, setup.cfg, and versioneer.py . """ root = os.path.realpath(os.path.abspath(os.getcwd())) setup_py = os.path.join(root, "setup.py") versioneer_py = os.path.join(root, "versioneer.py") if not (os.path.exists(setup_py) or os.path.exists(versioneer_py)): # allow 'python path/to/setup.py COMMAND' root = os.path.dirname(os.path.realpath(os.path.abspath(sys.argv[0]))) setup_py = os.path.join(root, "setup.py") versioneer_py = os.path.join(root, "versioneer.py") if not (os.path.exists(setup_py) or os.path.exists(versioneer_py)): err = ("Versioneer was unable to run the project root directory. " "Versioneer requires setup.py to be executed from " "its immediate directory (like 'python setup.py COMMAND'), " "or in a way that lets it use sys.argv[0] to find the root " "(like 'python path/to/setup.py COMMAND').") raise VersioneerBadRootError(err) try: # Certain runtime workflows (setup.py install/develop in a setuptools # tree) execute all dependencies in a single python process, so # "versioneer" may be imported multiple times, and python's shared # module-import table will cache the first one. So we can't use # os.path.dirname(__file__), as that will find whichever # versioneer.py was first imported, even in later projects. 
me = os.path.realpath(os.path.abspath(__file__)) me_dir = os.path.normcase(os.path.splitext(me)[0]) vsr_dir = os.path.normcase(os.path.splitext(versioneer_py)[0]) if me_dir != vsr_dir: print("Warning: build in %s is using versioneer.py from %s" % (os.path.dirname(me), versioneer_py)) except NameError: pass return root def get_config_from_root(root): """Read the project setup.cfg file to determine Versioneer config.""" # This might raise EnvironmentError (if setup.cfg is missing), or # configparser.NoSectionError (if it lacks a [versioneer] section), or # configparser.NoOptionError (if it lacks "VCS="). See the docstring at # the top of versioneer.py for instructions on writing your setup.cfg . setup_cfg = os.path.join(root, "setup.cfg") parser = configparser.SafeConfigParser() with open(setup_cfg, "r") as f: parser.readfp(f) VCS = parser.get("versioneer", "VCS") # mandatory def get(parser, name): if parser.has_option("versioneer", name): return parser.get("versioneer", name) return None cfg = VersioneerConfig() cfg.VCS = VCS cfg.style = get(parser, "style") or "" cfg.versionfile_source = get(parser, "versionfile_source") cfg.versionfile_build = get(parser, "versionfile_build") cfg.tag_prefix = get(parser, "tag_prefix") if cfg.tag_prefix in ("''", '""'): cfg.tag_prefix = "" cfg.parentdir_prefix = get(parser, "parentdir_prefix") cfg.verbose = get(parser, "verbose") return cfg class NotThisMethod(Exception): """Exception raised if a method is not valid for the current scenario.""" # these dictionaries contain VCS-specific tools LONG_VERSION_PY = {} HANDLERS = {} def register_vcs_handler(vcs, method): # decorator """Decorator to mark a method as the handler for a particular VCS.""" def decorate(f): """Store f in HANDLERS[vcs][method].""" if vcs not in HANDLERS: HANDLERS[vcs] = {} HANDLERS[vcs][method] = f return f return decorate def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, env=None): """Call the given command(s).""" assert 
isinstance(commands, list) p = None for c in commands: try: dispcmd = str([c] + args) # remember shell=False, so use git.cmd on windows, not just git p = subprocess.Popen([c] + args, cwd=cwd, env=env, stdout=subprocess.PIPE, stderr=(subprocess.PIPE if hide_stderr else None)) break except EnvironmentError: e = sys.exc_info()[1] if e.errno == errno.ENOENT: continue if verbose: print("unable to run %s" % dispcmd) print(e) return None, None else: if verbose: print("unable to find command, tried %s" % (commands,)) return None, None stdout = p.communicate()[0].strip() if sys.version_info[0] >= 3: stdout = stdout.decode() if p.returncode != 0: if verbose: print("unable to run %s (error)" % dispcmd) print("stdout was %s" % stdout) return None, p.returncode return stdout, p.returncode LONG_VERSION_PY['git'] = ''' # This file helps to compute a version number in source trees obtained from # git-archive tarball (such as those provided by githubs download-from-tag # feature). Distribution tarballs (built by setup.py sdist) and build # directories (produced by setup.py build) will contain a much shorter file # that just contains the computed version number. # This file is released into the public domain. Generated by # versioneer-0.18 (https://github.com/warner/python-versioneer) """Git implementation of _version.py.""" import errno import os import re import subprocess import sys def get_keywords(): """Get the keywords needed to look up the version information.""" # these strings will be replaced by git during git-archive. # setup.py/versioneer.py will grep for the variable names, so they must # each be defined on a line of their own. _version.py will just call # get_keywords(). 
git_refnames = "%(DOLLAR)sFormat:%%d%(DOLLAR)s" git_full = "%(DOLLAR)sFormat:%%H%(DOLLAR)s" git_date = "%(DOLLAR)sFormat:%%ci%(DOLLAR)s" keywords = {"refnames": git_refnames, "full": git_full, "date": git_date} return keywords class VersioneerConfig: """Container for Versioneer configuration parameters.""" def get_config(): """Create, populate and return the VersioneerConfig() object.""" # these strings are filled in when 'setup.py versioneer' creates # _version.py cfg = VersioneerConfig() cfg.VCS = "git" cfg.style = "%(STYLE)s" cfg.tag_prefix = "%(TAG_PREFIX)s" cfg.parentdir_prefix = "%(PARENTDIR_PREFIX)s" cfg.versionfile_source = "%(VERSIONFILE_SOURCE)s" cfg.verbose = False return cfg class NotThisMethod(Exception): """Exception raised if a method is not valid for the current scenario.""" LONG_VERSION_PY = {} HANDLERS = {} def register_vcs_handler(vcs, method): # decorator """Decorator to mark a method as the handler for a particular VCS.""" def decorate(f): """Store f in HANDLERS[vcs][method].""" if vcs not in HANDLERS: HANDLERS[vcs] = {} HANDLERS[vcs][method] = f return f return decorate def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, env=None): """Call the given command(s).""" assert isinstance(commands, list) p = None for c in commands: try: dispcmd = str([c] + args) # remember shell=False, so use git.cmd on windows, not just git p = subprocess.Popen([c] + args, cwd=cwd, env=env, stdout=subprocess.PIPE, stderr=(subprocess.PIPE if hide_stderr else None)) break except EnvironmentError: e = sys.exc_info()[1] if e.errno == errno.ENOENT: continue if verbose: print("unable to run %%s" %% dispcmd) print(e) return None, None else: if verbose: print("unable to find command, tried %%s" %% (commands,)) return None, None stdout = p.communicate()[0].strip() if sys.version_info[0] >= 3: stdout = stdout.decode() if p.returncode != 0: if verbose: print("unable to run %%s (error)" %% dispcmd) print("stdout was %%s" %% stdout) return None, 
p.returncode return stdout, p.returncode def versions_from_parentdir(parentdir_prefix, root, verbose): """Try to determine the version from the parent directory name. Source tarballs conventionally unpack into a directory that includes both the project name and a version string. We will also support searching up two directory levels for an appropriately named parent directory """ rootdirs = [] for i in range(3): dirname = os.path.basename(root) if dirname.startswith(parentdir_prefix): return {"version": dirname[len(parentdir_prefix):], "full-revisionid": None, "dirty": False, "error": None, "date": None} else: rootdirs.append(root) root = os.path.dirname(root) # up a level if verbose: print("Tried directories %%s but none started with prefix %%s" %% (str(rootdirs), parentdir_prefix)) raise NotThisMethod("rootdir doesn't start with parentdir_prefix") @register_vcs_handler("git", "get_keywords") def git_get_keywords(versionfile_abs): """Extract version information from the given file.""" # the code embedded in _version.py can just fetch the value of these # keywords. When used from setup.py, we don't want to import _version.py, # so we do it with a regexp instead. This function is not used from # _version.py. 
keywords = {} try: f = open(versionfile_abs, "r") for line in f.readlines(): if line.strip().startswith("git_refnames ="): mo = re.search(r'=\s*"(.*)"', line) if mo: keywords["refnames"] = mo.group(1) if line.strip().startswith("git_full ="): mo = re.search(r'=\s*"(.*)"', line) if mo: keywords["full"] = mo.group(1) if line.strip().startswith("git_date ="): mo = re.search(r'=\s*"(.*)"', line) if mo: keywords["date"] = mo.group(1) f.close() except EnvironmentError: pass return keywords @register_vcs_handler("git", "keywords") def git_versions_from_keywords(keywords, tag_prefix, verbose): """Get version information from git keywords.""" if not keywords: raise NotThisMethod("no keywords at all, weird") date = keywords.get("date") if date is not None: # git-2.2.0 added "%%cI", which expands to an ISO-8601 -compliant # datestamp. However we prefer "%%ci" (which expands to an "ISO-8601 # -like" string, which we must then edit to make compliant), because # it's been around since git-1.5.3, and it's too difficult to # discover which version we're using, or to work around using an # older one. date = date.strip().replace(" ", "T", 1).replace(" ", "", 1) refnames = keywords["refnames"].strip() if refnames.startswith("$Format"): if verbose: print("keywords are unexpanded, not using") raise NotThisMethod("unexpanded keywords, not a git-archive tarball") refs = set([r.strip() for r in refnames.strip("()").split(",")]) # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of # just "foo-1.0". If we see a "tag: " prefix, prefer those. TAG = "tag: " tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)]) if not tags: # Either we're using git < 1.8.3, or there really are no tags. We use # a heuristic: assume all version tags have a digit. The old git %%d # expansion behaves like git log --decorate=short and strips out the # refs/heads/ and refs/tags/ prefixes that would let us distinguish # between branches and tags. 
By ignoring refnames without digits, we # filter out many common branch names like "release" and # "stabilization", as well as "HEAD" and "master". tags = set([r for r in refs if re.search(r'\d', r)]) if verbose: print("discarding '%%s', no digits" %% ",".join(refs - tags)) if verbose: print("likely tags: %%s" %% ",".join(sorted(tags))) for ref in sorted(tags): # sorting will prefer e.g. "2.0" over "2.0rc1" if ref.startswith(tag_prefix): r = ref[len(tag_prefix):] if verbose: print("picking %%s" %% r) return {"version": r, "full-revisionid": keywords["full"].strip(), "dirty": False, "error": None, "date": date} # no suitable tags, so version is "0+unknown", but full hex is still there if verbose: print("no suitable tags, using unknown + full revision id") return {"version": "0+unknown", "full-revisionid": keywords["full"].strip(), "dirty": False, "error": "no suitable tags", "date": None} @register_vcs_handler("git", "pieces_from_vcs") def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): """Get version from 'git describe' in the root of the source tree. This only gets called if the git-archive 'subst' keywords were *not* expanded, and _version.py hasn't already been rewritten with a short version string, meaning we're inside a checked out source tree. 
""" GITS = ["git"] if sys.platform == "win32": GITS = ["git.cmd", "git.exe"] out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root, hide_stderr=True) if rc != 0: if verbose: print("Directory %%s not under git control" %% root) raise NotThisMethod("'git rev-parse --git-dir' returned error") # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty] # if there isn't one, this yields HEX[-dirty] (no NUM) describe_out, rc = run_command(GITS, ["describe", "--tags", "--dirty", "--always", "--long", "--match", "%%s*" %% tag_prefix], cwd=root) # --long was added in git-1.5.5 if describe_out is None: raise NotThisMethod("'git describe' failed") describe_out = describe_out.strip() full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root) if full_out is None: raise NotThisMethod("'git rev-parse' failed") full_out = full_out.strip() pieces = {} pieces["long"] = full_out pieces["short"] = full_out[:7] # maybe improved later pieces["error"] = None # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] # TAG might have hyphens. git_describe = describe_out # look for -dirty suffix dirty = git_describe.endswith("-dirty") pieces["dirty"] = dirty if dirty: git_describe = git_describe[:git_describe.rindex("-dirty")] # now we have TAG-NUM-gHEX or HEX if "-" in git_describe: # TAG-NUM-gHEX mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe) if not mo: # unparseable. Maybe git-describe is misbehaving? 
pieces["error"] = ("unable to parse git-describe output: '%%s'" %% describe_out) return pieces # tag full_tag = mo.group(1) if not full_tag.startswith(tag_prefix): if verbose: fmt = "tag '%%s' doesn't start with prefix '%%s'" print(fmt %% (full_tag, tag_prefix)) pieces["error"] = ("tag '%%s' doesn't start with prefix '%%s'" %% (full_tag, tag_prefix)) return pieces pieces["closest-tag"] = full_tag[len(tag_prefix):] # distance: number of commits since tag pieces["distance"] = int(mo.group(2)) # commit: short hex revision ID pieces["short"] = mo.group(3) else: # HEX: no tags pieces["closest-tag"] = None count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"], cwd=root) pieces["distance"] = int(count_out) # total number of commits # commit date: see ISO-8601 comment in git_versions_from_keywords() date = run_command(GITS, ["show", "-s", "--format=%%ci", "HEAD"], cwd=root)[0].strip() pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1) return pieces def plus_or_dot(pieces): """Return a + if we don't already have one, else return a .""" if "+" in pieces.get("closest-tag", ""): return "." return "+" def render_pep440(pieces): """Build up version string, with post-release "local version identifier". Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty Exceptions: 1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty] """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"] or pieces["dirty"]: rendered += plus_or_dot(pieces) rendered += "%%d.g%%s" %% (pieces["distance"], pieces["short"]) if pieces["dirty"]: rendered += ".dirty" else: # exception #1 rendered = "0+untagged.%%d.g%%s" %% (pieces["distance"], pieces["short"]) if pieces["dirty"]: rendered += ".dirty" return rendered def render_pep440_pre(pieces): """TAG[.post.devDISTANCE] -- No -dirty. Exceptions: 1: no tags. 
0.post.devDISTANCE """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"]: rendered += ".post.dev%%d" %% pieces["distance"] else: # exception #1 rendered = "0.post.dev%%d" %% pieces["distance"] return rendered def render_pep440_post(pieces): """TAG[.postDISTANCE[.dev0]+gHEX] . The ".dev0" means dirty. Note that .dev0 sorts backwards (a dirty tree will appear "older" than the corresponding clean one), but you shouldn't be releasing software with -dirty anyways. Exceptions: 1: no tags. 0.postDISTANCE[.dev0] """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"] or pieces["dirty"]: rendered += ".post%%d" %% pieces["distance"] if pieces["dirty"]: rendered += ".dev0" rendered += plus_or_dot(pieces) rendered += "g%%s" %% pieces["short"] else: # exception #1 rendered = "0.post%%d" %% pieces["distance"] if pieces["dirty"]: rendered += ".dev0" rendered += "+g%%s" %% pieces["short"] return rendered def render_pep440_old(pieces): """TAG[.postDISTANCE[.dev0]] . The ".dev0" means dirty. Eexceptions: 1: no tags. 0.postDISTANCE[.dev0] """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"] or pieces["dirty"]: rendered += ".post%%d" %% pieces["distance"] if pieces["dirty"]: rendered += ".dev0" else: # exception #1 rendered = "0.post%%d" %% pieces["distance"] if pieces["dirty"]: rendered += ".dev0" return rendered def render_git_describe(pieces): """TAG[-DISTANCE-gHEX][-dirty]. Like 'git describe --tags --dirty --always'. Exceptions: 1: no tags. HEX[-dirty] (note: no 'g' prefix) """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"]: rendered += "-%%d-g%%s" %% (pieces["distance"], pieces["short"]) else: # exception #1 rendered = pieces["short"] if pieces["dirty"]: rendered += "-dirty" return rendered def render_git_describe_long(pieces): """TAG-DISTANCE-gHEX[-dirty]. Like 'git describe --tags --dirty --always -long'. The distance/hash is unconditional. 
Exceptions: 1: no tags. HEX[-dirty] (note: no 'g' prefix) """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] rendered += "-%%d-g%%s" %% (pieces["distance"], pieces["short"]) else: # exception #1 rendered = pieces["short"] if pieces["dirty"]: rendered += "-dirty" return rendered def render(pieces, style): """Render the given version pieces into the requested style.""" if pieces["error"]: return {"version": "unknown", "full-revisionid": pieces.get("long"), "dirty": None, "error": pieces["error"], "date": None} if not style or style == "default": style = "pep440" # the default if style == "pep440": rendered = render_pep440(pieces) elif style == "pep440-pre": rendered = render_pep440_pre(pieces) elif style == "pep440-post": rendered = render_pep440_post(pieces) elif style == "pep440-old": rendered = render_pep440_old(pieces) elif style == "git-describe": rendered = render_git_describe(pieces) elif style == "git-describe-long": rendered = render_git_describe_long(pieces) else: raise ValueError("unknown style '%%s'" %% style) return {"version": rendered, "full-revisionid": pieces["long"], "dirty": pieces["dirty"], "error": None, "date": pieces.get("date")} def get_versions(): """Get version information or return default if unable to do so.""" # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have # __file__, we can work backwards from there to the root. Some # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which # case we can only use expanded keywords. cfg = get_config() verbose = cfg.verbose try: return git_versions_from_keywords(get_keywords(), cfg.tag_prefix, verbose) except NotThisMethod: pass try: root = os.path.realpath(__file__) # versionfile_source is the relative path from the top of the source # tree (where the .git directory might live) to this file. Invert # this to find the root from __file__. 
for i in cfg.versionfile_source.split('/'): root = os.path.dirname(root) except NameError: return {"version": "0+unknown", "full-revisionid": None, "dirty": None, "error": "unable to find root of source tree", "date": None} try: pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose) return render(pieces, cfg.style) except NotThisMethod: pass try: if cfg.parentdir_prefix: return versions_from_parentdir(cfg.parentdir_prefix, root, verbose) except NotThisMethod: pass return {"version": "0+unknown", "full-revisionid": None, "dirty": None, "error": "unable to compute version", "date": None} ''' @register_vcs_handler("git", "get_keywords") def git_get_keywords(versionfile_abs): """Extract version information from the given file.""" # the code embedded in _version.py can just fetch the value of these # keywords. When used from setup.py, we don't want to import _version.py, # so we do it with a regexp instead. This function is not used from # _version.py. keywords = {} try: f = open(versionfile_abs, "r") for line in f.readlines(): if line.strip().startswith("git_refnames ="): mo = re.search(r'=\s*"(.*)"', line) if mo: keywords["refnames"] = mo.group(1) if line.strip().startswith("git_full ="): mo = re.search(r'=\s*"(.*)"', line) if mo: keywords["full"] = mo.group(1) if line.strip().startswith("git_date ="): mo = re.search(r'=\s*"(.*)"', line) if mo: keywords["date"] = mo.group(1) f.close() except EnvironmentError: pass return keywords @register_vcs_handler("git", "keywords") def git_versions_from_keywords(keywords, tag_prefix, verbose): """Get version information from git keywords.""" if not keywords: raise NotThisMethod("no keywords at all, weird") date = keywords.get("date") if date is not None: # git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant # datestamp. 
However we prefer "%ci" (which expands to an "ISO-8601 # -like" string, which we must then edit to make compliant), because # it's been around since git-1.5.3, and it's too difficult to # discover which version we're using, or to work around using an # older one. date = date.strip().replace(" ", "T", 1).replace(" ", "", 1) refnames = keywords["refnames"].strip() if refnames.startswith("$Format"): if verbose: print("keywords are unexpanded, not using") raise NotThisMethod("unexpanded keywords, not a git-archive tarball") refs = set([r.strip() for r in refnames.strip("()").split(",")]) # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of # just "foo-1.0". If we see a "tag: " prefix, prefer those. TAG = "tag: " tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)]) if not tags: # Either we're using git < 1.8.3, or there really are no tags. We use # a heuristic: assume all version tags have a digit. The old git %d # expansion behaves like git log --decorate=short and strips out the # refs/heads/ and refs/tags/ prefixes that would let us distinguish # between branches and tags. By ignoring refnames without digits, we # filter out many common branch names like "release" and # "stabilization", as well as "HEAD" and "master". tags = set([r for r in refs if re.search(r'\d', r)]) if verbose: print("discarding '%s', no digits" % ",".join(refs - tags)) if verbose: print("likely tags: %s" % ",".join(sorted(tags))) for ref in sorted(tags): # sorting will prefer e.g. 
"2.0" over "2.0rc1" if ref.startswith(tag_prefix): r = ref[len(tag_prefix):] if verbose: print("picking %s" % r) return {"version": r, "full-revisionid": keywords["full"].strip(), "dirty": False, "error": None, "date": date} # no suitable tags, so version is "0+unknown", but full hex is still there if verbose: print("no suitable tags, using unknown + full revision id") return {"version": "0+unknown", "full-revisionid": keywords["full"].strip(), "dirty": False, "error": "no suitable tags", "date": None} @register_vcs_handler("git", "pieces_from_vcs") def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): """Get version from 'git describe' in the root of the source tree. This only gets called if the git-archive 'subst' keywords were *not* expanded, and _version.py hasn't already been rewritten with a short version string, meaning we're inside a checked out source tree. """ GITS = ["git"] if sys.platform == "win32": GITS = ["git.cmd", "git.exe"] out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root, hide_stderr=True) if rc != 0: if verbose: print("Directory %s not under git control" % root) raise NotThisMethod("'git rev-parse --git-dir' returned error") # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty] # if there isn't one, this yields HEX[-dirty] (no NUM) describe_out, rc = run_command(GITS, ["describe", "--tags", "--dirty", "--always", "--long", "--match", "%s*" % tag_prefix], cwd=root) # --long was added in git-1.5.5 if describe_out is None: raise NotThisMethod("'git describe' failed") describe_out = describe_out.strip() full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root) if full_out is None: raise NotThisMethod("'git rev-parse' failed") full_out = full_out.strip() pieces = {} pieces["long"] = full_out pieces["short"] = full_out[:7] # maybe improved later pieces["error"] = None # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] # TAG might have hyphens. 
    # (tail of git_pieces_from_vcs: parse 'git describe' output into pieces)
    git_describe = describe_out

    # look for -dirty suffix
    dirty = git_describe.endswith("-dirty")
    pieces["dirty"] = dirty
    if dirty:
        git_describe = git_describe[:git_describe.rindex("-dirty")]

    # now we have TAG-NUM-gHEX or HEX

    if "-" in git_describe:
        # TAG-NUM-gHEX
        mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe)
        if not mo:
            # unparseable. Maybe git-describe is misbehaving?
            pieces["error"] = ("unable to parse git-describe output: '%s'"
                              % describe_out)
            return pieces

        # tag
        full_tag = mo.group(1)
        if not full_tag.startswith(tag_prefix):
            if verbose:
                fmt = "tag '%s' doesn't start with prefix '%s'"
                print(fmt % (full_tag, tag_prefix))
            pieces["error"] = ("tag '%s' doesn't start with prefix '%s'"
                               % (full_tag, tag_prefix))
            return pieces
        pieces["closest-tag"] = full_tag[len(tag_prefix):]

        # distance: number of commits since tag
        pieces["distance"] = int(mo.group(2))

        # commit: short hex revision ID
        pieces["short"] = mo.group(3)

    else:
        # HEX: no tags
        pieces["closest-tag"] = None
        count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"],
                                    cwd=root)
        pieces["distance"] = int(count_out)  # total number of commits

    # commit date: see ISO-8601 comment in git_versions_from_keywords()
    date = run_command(GITS, ["show", "-s", "--format=%ci", "HEAD"],
                       cwd=root)[0].strip()
    pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1)

    return pieces


# NOTE(review): this file is vendored Versioneer 0.18 output (see the
# "(0.18)" banner in SHORT_VERSION_PY below); avoid hand-editing beyond
# comments — regenerate with 'versioneer.py setup' instead.
def do_vcs_install(manifest_in, versionfile_source, ipy):
    """Git-specific installation logic for Versioneer.

    For Git, this means creating/changing .gitattributes to mark _version.py
    for export-subst keyword substitution.
    """
    GITS = ["git"]
    if sys.platform == "win32":
        GITS = ["git.cmd", "git.exe"]
    files = [manifest_in, versionfile_source]
    if ipy:
        files.append(ipy)
    try:
        me = __file__
        # normalize compiled-module paths back to the .py source path
        if me.endswith(".pyc") or me.endswith(".pyo"):
            me = os.path.splitext(me)[0] + ".py"
        versioneer_file = os.path.relpath(me)
    except NameError:
        # __file__ undefined (e.g. frozen/interactive); fall back to the name
        versioneer_file = "versioneer.py"
    files.append(versioneer_file)
    present = False
    try:
        f = open(".gitattributes", "r")
        for line in f.readlines():
            if line.strip().startswith(versionfile_source):
                if "export-subst" in line.strip().split()[1:]:
                    present = True
        f.close()
    except EnvironmentError:
        # no .gitattributes yet; we'll create it below
        pass
    if not present:
        f = open(".gitattributes", "a+")
        f.write("%s export-subst\n" % versionfile_source)
        f.close()
        files.append(".gitattributes")
    run_command(GITS, ["add", "--"] + files)


def versions_from_parentdir(parentdir_prefix, root, verbose):
    """Try to determine the version from the parent directory name.

    Source tarballs conventionally unpack into a directory that includes both
    the project name and a version string. We will also support searching up
    two directory levels for an appropriately named parent directory
    """
    rootdirs = []
    for i in range(3):
        dirname = os.path.basename(root)
        if dirname.startswith(parentdir_prefix):
            return {"version": dirname[len(parentdir_prefix):],
                    "full-revisionid": None,
                    "dirty": False, "error": None, "date": None}
        else:
            rootdirs.append(root)
            root = os.path.dirname(root)  # up a level

    if verbose:
        print("Tried directories %s but none started with prefix %s" %
              (str(rootdirs), parentdir_prefix))
    raise NotThisMethod("rootdir doesn't start with parentdir_prefix")


# Template written into _version.py by write_to_version_file(); the
# "END VERSION_JSON" marker must match the regexes in versions_from_file().
SHORT_VERSION_PY = """
# This file was generated by 'versioneer.py' (0.18) from
# revision-control system data, or from the parent directory name of an
# unpacked source archive. Distribution tarballs contain a pre-generated copy
# of this file.

import json

version_json = '''
%s
'''  # END VERSION_JSON


def get_versions():
    return json.loads(version_json)
"""


def versions_from_file(filename):
    """Try to determine the version from _version.py if present."""
    try:
        with open(filename) as f:
            contents = f.read()
    except EnvironmentError:
        raise NotThisMethod("unable to read _version.py")
    # the two searches differ only in line endings (LF vs CRLF checkouts)
    mo = re.search(r"version_json = '''\n(.*)'''  # END VERSION_JSON",
                   contents, re.M | re.S)
    if not mo:
        mo = re.search(r"version_json = '''\r\n(.*)'''  # END VERSION_JSON",
                       contents, re.M | re.S)
    if not mo:
        raise NotThisMethod("no version_json in _version.py")
    return json.loads(mo.group(1))


def write_to_version_file(filename, versions):
    """Write the given version number to the given _version.py file."""
    os.unlink(filename)
    contents = json.dumps(versions, sort_keys=True,
                          indent=1, separators=(",", ": "))
    with open(filename, "w") as f:
        f.write(SHORT_VERSION_PY % contents)

    print("set %s to '%s'" % (filename, versions["version"]))


def plus_or_dot(pieces):
    """Return a + if we don't already have one, else return a ."""
    if "+" in pieces.get("closest-tag", ""):
        return "."
    return "+"


def render_pep440(pieces):
    """Build up version string, with post-release "local version identifier".

    Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you
    get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty

    Exceptions:
    1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty]
    """
    if pieces["closest-tag"]:
        rendered = pieces["closest-tag"]
        if pieces["distance"] or pieces["dirty"]:
            rendered += plus_or_dot(pieces)
            rendered += "%d.g%s" % (pieces["distance"], pieces["short"])
            if pieces["dirty"]:
                rendered += ".dirty"
    else:
        # exception #1
        rendered = "0+untagged.%d.g%s" % (pieces["distance"],
                                          pieces["short"])
        if pieces["dirty"]:
            rendered += ".dirty"
    return rendered


def render_pep440_pre(pieces):
    """TAG[.post.devDISTANCE] -- No -dirty.

    Exceptions:
    1: no tags. 0.post.devDISTANCE
    """
    if pieces["closest-tag"]:
        rendered = pieces["closest-tag"]
        if pieces["distance"]:
            rendered += ".post.dev%d" % pieces["distance"]
    else:
        # exception #1
        rendered = "0.post.dev%d" % pieces["distance"]
    return rendered


def render_pep440_post(pieces):
    """TAG[.postDISTANCE[.dev0]+gHEX] .

    The ".dev0" means dirty. Note that .dev0 sorts backwards
    (a dirty tree will appear "older" than the corresponding clean one),
    but you shouldn't be releasing software with -dirty anyways.

    Exceptions:
    1: no tags. 0.postDISTANCE[.dev0]
    """
    if pieces["closest-tag"]:
        rendered = pieces["closest-tag"]
        if pieces["distance"] or pieces["dirty"]:
            rendered += ".post%d" % pieces["distance"]
            if pieces["dirty"]:
                rendered += ".dev0"
            rendered += plus_or_dot(pieces)
            rendered += "g%s" % pieces["short"]
    else:
        # exception #1
        rendered = "0.post%d" % pieces["distance"]
        if pieces["dirty"]:
            rendered += ".dev0"
        rendered += "+g%s" % pieces["short"]
    return rendered


def render_pep440_old(pieces):
    """TAG[.postDISTANCE[.dev0]] .

    The ".dev0" means dirty.

    Exceptions:
    1: no tags. 0.postDISTANCE[.dev0]
    """
    if pieces["closest-tag"]:
        rendered = pieces["closest-tag"]
        if pieces["distance"] or pieces["dirty"]:
            rendered += ".post%d" % pieces["distance"]
            if pieces["dirty"]:
                rendered += ".dev0"
    else:
        # exception #1
        rendered = "0.post%d" % pieces["distance"]
        if pieces["dirty"]:
            rendered += ".dev0"
    return rendered


def render_git_describe(pieces):
    """TAG[-DISTANCE-gHEX][-dirty].

    Like 'git describe --tags --dirty --always'.

    Exceptions:
    1: no tags. HEX[-dirty]  (note: no 'g' prefix)
    """
    if pieces["closest-tag"]:
        rendered = pieces["closest-tag"]
        if pieces["distance"]:
            rendered += "-%d-g%s" % (pieces["distance"], pieces["short"])
    else:
        # exception #1
        rendered = pieces["short"]
    if pieces["dirty"]:
        rendered += "-dirty"
    return rendered


def render_git_describe_long(pieces):
    """TAG-DISTANCE-gHEX[-dirty].

    Like 'git describe --tags --dirty --always -long'.
    The distance/hash is unconditional.

    Exceptions:
    1: no tags. HEX[-dirty]  (note: no 'g' prefix)
    """
    if pieces["closest-tag"]:
        rendered = pieces["closest-tag"]
        rendered += "-%d-g%s" % (pieces["distance"], pieces["short"])
    else:
        # exception #1
        rendered = pieces["short"]
    if pieces["dirty"]:
        rendered += "-dirty"
    return rendered


def render(pieces, style):
    """Render the given version pieces into the requested style."""
    if pieces["error"]:
        return {"version": "unknown",
                "full-revisionid": pieces.get("long"),
                "dirty": None,
                "error": pieces["error"],
                "date": None}

    if not style or style == "default":
        style = "pep440"  # the default

    if style == "pep440":
        rendered = render_pep440(pieces)
    elif style == "pep440-pre":
        rendered = render_pep440_pre(pieces)
    elif style == "pep440-post":
        rendered = render_pep440_post(pieces)
    elif style == "pep440-old":
        rendered = render_pep440_old(pieces)
    elif style == "git-describe":
        rendered = render_git_describe(pieces)
    elif style == "git-describe-long":
        rendered = render_git_describe_long(pieces)
    else:
        raise ValueError("unknown style '%s'" % style)

    return {"version": rendered, "full-revisionid": pieces["long"],
            "dirty": pieces["dirty"], "error": None,
            "date": pieces.get("date")}


class VersioneerBadRootError(Exception):
    """The project root directory is unknown or missing key files."""


def get_versions(verbose=False):
    """Get the project version from whatever source is available.

    Returns dict with two keys: 'version' and 'full'.
    """
    if "versioneer" in sys.modules:
        # see the discussion in cmdclass.py:get_cmdclass()
        del sys.modules["versioneer"]

    root = get_root()
    cfg = get_config_from_root(root)

    assert cfg.VCS is not None, "please set [versioneer]VCS= in setup.cfg"
    handlers = HANDLERS.get(cfg.VCS)
    assert handlers, "unrecognized VCS '%s'" % cfg.VCS
    verbose = verbose or cfg.verbose
    assert cfg.versionfile_source is not None, \
        "please set versioneer.versionfile_source"
    assert cfg.tag_prefix is not None, "please set versioneer.tag_prefix"

    versionfile_abs = os.path.join(root, cfg.versionfile_source)

    # extract version from first of: _version.py, VCS command (e.g. 'git
    # describe'), parentdir. This is meant to work for developers using a
    # source checkout, for users of a tarball created by 'setup.py sdist',
    # and for users of a tarball/zipball created by 'git archive' or github's
    # download-from-tag feature or the equivalent in other VCSes.

    get_keywords_f = handlers.get("get_keywords")
    from_keywords_f = handlers.get("keywords")
    if get_keywords_f and from_keywords_f:
        try:
            keywords = get_keywords_f(versionfile_abs)
            ver = from_keywords_f(keywords, cfg.tag_prefix, verbose)
            if verbose:
                print("got version from expanded keyword %s" % ver)
            return ver
        except NotThisMethod:
            pass

    try:
        ver = versions_from_file(versionfile_abs)
        if verbose:
            print("got version from file %s %s" % (versionfile_abs, ver))
        return ver
    except NotThisMethod:
        pass

    from_vcs_f = handlers.get("pieces_from_vcs")
    if from_vcs_f:
        try:
            pieces = from_vcs_f(cfg.tag_prefix, root, verbose)
            ver = render(pieces, cfg.style)
            if verbose:
                print("got version from VCS %s" % ver)
            return ver
        except NotThisMethod:
            pass

    try:
        if cfg.parentdir_prefix:
            ver = versions_from_parentdir(cfg.parentdir_prefix, root, verbose)
            if verbose:
                print("got version from parentdir %s" % ver)
            return ver
    except NotThisMethod:
        pass

    if verbose:
        print("unable to compute version")

    # all strategies exhausted: report an explicit "unknown" version
    return {"version": "0+unknown", "full-revisionid": None,
            "dirty": None, "error": "unable to compute version",
            "date": None}


def get_version():
    """Get the short version string for this project."""
    return get_versions()["version"]


def get_cmdclass():
    """Get the custom setuptools/distutils subclasses used by Versioneer."""
    if "versioneer" in sys.modules:
        del sys.modules["versioneer"]
        # this fixes the "python setup.py develop" case (also 'install' and
        # 'easy_install .'), in which subdependencies of the main project are
        # built (using setup.py bdist_egg) in the same python process. Assume
        # a main project A and a dependency B, which use different versions
        # of Versioneer. A's setup.py imports A's Versioneer, leaving it in
        # sys.modules by the time B's setup.py is executed, causing B to run
        # with the wrong versioneer. Setuptools wraps the sub-dep builds in a
        # sandbox that restores sys.modules to its pre-build state, so the
        # parent is protected against the child's "import versioneer". By
        # removing ourselves from sys.modules here, before the child build
        # happens, we protect the child from the parent's versioneer too.
        # Also see https://github.com/warner/python-versioneer/issues/52

    cmds = {}

    # we add "version" to both distutils and setuptools
    from distutils.core import Command

    class cmd_version(Command):
        description = "report generated version string"
        user_options = []
        boolean_options = []

        def initialize_options(self):
            pass

        def finalize_options(self):
            pass

        def run(self):
            vers = get_versions(verbose=True)
            print("Version: %s" % vers["version"])
            print(" full-revisionid: %s" % vers.get("full-revisionid"))
            print(" dirty: %s" % vers.get("dirty"))
            print(" date: %s" % vers.get("date"))
            if vers["error"]:
                print(" error: %s" % vers["error"])
    cmds["version"] = cmd_version

    # we override "build_py" in both distutils and setuptools
    #
    # most invocation pathways end up running build_py:
    #  distutils/build -> build_py
    #  distutils/install -> distutils/build ->..
    #  setuptools/bdist_wheel -> distutils/install ->..
    #  setuptools/bdist_egg -> distutils/install_lib -> build_py
    #  setuptools/install -> bdist_egg ->..
    #  setuptools/develop -> ?
    #  pip install:
    #   copies source tree to a tempdir before running egg_info/etc
    #   if .git isn't copied too, 'git describe' will fail
    #   then does setup.py bdist_wheel, or sometimes setup.py install
    #  setup.py egg_info -> ?

    # we override different "build_py" commands for both environments
    if "setuptools" in sys.modules:
        from setuptools.command.build_py import build_py as _build_py
    else:
        from distutils.command.build_py import build_py as _build_py

    class cmd_build_py(_build_py):
        def run(self):
            root = get_root()
            cfg = get_config_from_root(root)
            versions = get_versions()
            _build_py.run(self)
            # now locate _version.py in the new build/ directory and replace
            # it with an updated value
            if cfg.versionfile_build:
                target_versionfile = os.path.join(self.build_lib,
                                                  cfg.versionfile_build)
                print("UPDATING %s" % target_versionfile)
                write_to_version_file(target_versionfile, versions)
    cmds["build_py"] = cmd_build_py

    if "cx_Freeze" in sys.modules:  # cx_freeze enabled?
        from cx_Freeze.dist import build_exe as _build_exe
        # nczeczulin reports that py2exe won't like the pep440-style string
        # as FILEVERSION, but it can be used for PRODUCTVERSION, e.g.
        # setup(console=[{
        #   "version": versioneer.get_version().split("+", 1)[0],  # FILEVERSION
        #   "product_version": versioneer.get_version(),
        #   ...

        class cmd_build_exe(_build_exe):
            def run(self):
                root = get_root()
                cfg = get_config_from_root(root)
                versions = get_versions()
                target_versionfile = cfg.versionfile_source
                print("UPDATING %s" % target_versionfile)
                write_to_version_file(target_versionfile, versions)

                _build_exe.run(self)
                # restore the template _version.py after the frozen build
                os.unlink(target_versionfile)
                with open(cfg.versionfile_source, "w") as f:
                    LONG = LONG_VERSION_PY[cfg.VCS]
                    f.write(LONG %
                            {"DOLLAR": "$",
                             "STYLE": cfg.style,
                             "TAG_PREFIX": cfg.tag_prefix,
                             "PARENTDIR_PREFIX": cfg.parentdir_prefix,
                             "VERSIONFILE_SOURCE": cfg.versionfile_source,
                             })
        cmds["build_exe"] = cmd_build_exe
        del cmds["build_py"]

    if 'py2exe' in sys.modules:  # py2exe enabled?
        try:
            from py2exe.distutils_buildexe import py2exe as _py2exe  # py3
        except ImportError:
            from py2exe.build_exe import py2exe as _py2exe  # py2

        class cmd_py2exe(_py2exe):
            def run(self):
                root = get_root()
                cfg = get_config_from_root(root)
                versions = get_versions()
                target_versionfile = cfg.versionfile_source
                print("UPDATING %s" % target_versionfile)
                write_to_version_file(target_versionfile, versions)

                _py2exe.run(self)
                # restore the template _version.py after the frozen build
                os.unlink(target_versionfile)
                with open(cfg.versionfile_source, "w") as f:
                    LONG = LONG_VERSION_PY[cfg.VCS]
                    f.write(LONG %
                            {"DOLLAR": "$",
                             "STYLE": cfg.style,
                             "TAG_PREFIX": cfg.tag_prefix,
                             "PARENTDIR_PREFIX": cfg.parentdir_prefix,
                             "VERSIONFILE_SOURCE": cfg.versionfile_source,
                             })
        cmds["py2exe"] = cmd_py2exe

    # we override different "sdist" commands for both environments
    if "setuptools" in sys.modules:
        from setuptools.command.sdist import sdist as _sdist
    else:
        from distutils.command.sdist import sdist as _sdist

    class cmd_sdist(_sdist):
        def run(self):
            versions = get_versions()
            self._versioneer_generated_versions = versions
            # unless we update this, the command will keep using the old
            # version
            self.distribution.metadata.version = versions["version"]
            return _sdist.run(self)

        def make_release_tree(self, base_dir, files):
            root = get_root()
            cfg = get_config_from_root(root)
            _sdist.make_release_tree(self, base_dir, files)
            # now locate _version.py in the new base_dir directory
            # (remembering that it may be a hardlink) and replace it with an
            # updated value
            target_versionfile = os.path.join(base_dir, cfg.versionfile_source)
            print("UPDATING %s" % target_versionfile)
            write_to_version_file(target_versionfile,
                                  self._versioneer_generated_versions)
    cmds["sdist"] = cmd_sdist

    return cmds


CONFIG_ERROR = """
setup.cfg is missing the necessary Versioneer configuration. You need
a section like:

 [versioneer]
 VCS = git
 style = pep440
 versionfile_source = src/myproject/_version.py
 versionfile_build = myproject/_version.py
 tag_prefix =
 parentdir_prefix = myproject-

You will also need to edit your setup.py to use the results:

 import versioneer
 setup(version=versioneer.get_version(),
       cmdclass=versioneer.get_cmdclass(), ...)

Please read the docstring in ./versioneer.py for configuration instructions,
edit setup.cfg, and re-run the installer or 'python versioneer.py setup'.
"""

SAMPLE_CONFIG = """
# See the docstring in versioneer.py for instructions. Note that you must
# re-run 'versioneer.py setup' after changing this section, and commit the
# resulting files.

[versioneer]
#VCS = git
#style = pep440
#versionfile_source =
#versionfile_build =
#tag_prefix =
#parentdir_prefix =

"""

INIT_PY_SNIPPET = """
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
"""


def do_setup():
    """Main VCS-independent setup function for installing Versioneer."""
    root = get_root()
    try:
        cfg = get_config_from_root(root)
    except (EnvironmentError, configparser.NoSectionError,
            configparser.NoOptionError) as e:
        # missing setup.cfg or [versioneer] section: seed a sample config
        if isinstance(e, (EnvironmentError, configparser.NoSectionError)):
            print("Adding sample versioneer config to setup.cfg",
                  file=sys.stderr)
            with open(os.path.join(root, "setup.cfg"), "a") as f:
                f.write(SAMPLE_CONFIG)
        print(CONFIG_ERROR, file=sys.stderr)
        return 1

    print(" creating %s" % cfg.versionfile_source)
    with open(cfg.versionfile_source, "w") as f:
        LONG = LONG_VERSION_PY[cfg.VCS]
        f.write(LONG % {"DOLLAR": "$",
                        "STYLE": cfg.style,
                        "TAG_PREFIX": cfg.tag_prefix,
                        "PARENTDIR_PREFIX": cfg.parentdir_prefix,
                        "VERSIONFILE_SOURCE": cfg.versionfile_source,
                        })

    ipy = os.path.join(os.path.dirname(cfg.versionfile_source),
                       "__init__.py")
    if os.path.exists(ipy):
        try:
            with open(ipy, "r") as f:
                old = f.read()
        except EnvironmentError:
            old = ""
        if INIT_PY_SNIPPET not in old:
            print(" appending to %s" % ipy)
            with open(ipy, "a") as f:
                f.write(INIT_PY_SNIPPET)
        else:
            print(" %s unmodified" % ipy)
    else:
        print(" %s doesn't exist, ok" % ipy)
        ipy = None

    # Make sure both the top-level "versioneer.py" and versionfile_source
    # (PKG/_version.py, used by runtime code) are in MANIFEST.in, so
    # they'll be copied into source distributions. Pip won't be able to
    # install the package without this.
    manifest_in = os.path.join(root, "MANIFEST.in")
    simple_includes = set()
    try:
        with open(manifest_in, "r") as f:
            for line in f:
                if line.startswith("include "):
                    for include in line.split()[1:]:
                        simple_includes.add(include)
    except EnvironmentError:
        pass
    # That doesn't cover everything MANIFEST.in can do
    # (http://docs.python.org/2/distutils/sourcedist.html#commands), so
    # it might give some false negatives. Appending redundant 'include'
    # lines is safe, though.
    if "versioneer.py" not in simple_includes:
        print(" appending 'versioneer.py' to MANIFEST.in")
        with open(manifest_in, "a") as f:
            f.write("include versioneer.py\n")
    else:
        print(" 'versioneer.py' already in MANIFEST.in")
    if cfg.versionfile_source not in simple_includes:
        print(" appending versionfile_source ('%s') to MANIFEST.in" %
              cfg.versionfile_source)
        with open(manifest_in, "a") as f:
            f.write("include %s\n" % cfg.versionfile_source)
    else:
        print(" versionfile_source already in MANIFEST.in")

    # Make VCS-specific changes. For git, this means creating/changing
    # .gitattributes to mark _version.py for export-subst keyword
    # substitution.
    do_vcs_install(manifest_in, cfg.versionfile_source, ipy)
    return 0


def scan_setup_py():
    """Validate the contents of setup.py against Versioneer's expectations."""
    found = set()
    setters = False
    errors = 0
    with open("setup.py", "r") as f:
        for line in f.readlines():
            if "import versioneer" in line:
                found.add("import")
            if "versioneer.get_cmdclass()" in line:
                found.add("cmdclass")
            if "versioneer.get_version()" in line:
                found.add("get_version")
            if "versioneer.VCS" in line:
                setters = True
            if "versioneer.versionfile_source" in line:
                setters = True
    if len(found) != 3:
        print("")
        print("Your setup.py appears to be missing some important items")
        print("(but I might be wrong). Please make sure it has something")
        print("roughly like the following:")
        print("")
        print(" import versioneer")
        print(" setup( version=versioneer.get_version(),")
        print("        cmdclass=versioneer.get_cmdclass(),  ...)")
        print("")
        errors += 1
    if setters:
        print("You should remove lines like 'versioneer.VCS = ' and")
        print("'versioneer.versionfile_source = ' . This configuration")
        print("now lives in setup.cfg, and should be removed from setup.py")
        print("")
        errors += 1
    return errors


if __name__ == "__main__":
    cmd = sys.argv[1]
    if cmd == "setup":
        errors = do_setup()
        errors += scan_setup_py()
        if errors:
            sys.exit(1)